blob: 370c7f4ec306a27e1a91b33fe6378e34f9153f09 [file] [log] [blame]
Michael Gottesman79d8d812013-01-28 01:35:51 +00001//===- ObjCARCOpts.cpp - ObjC ARC Optimization ----------------------------===//
John McCalld935e9c2011-06-15 23:37:01 +00002//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Michael Gottesman97e3df02013-01-14 00:35:14 +00009/// \file
10/// This file defines ObjC ARC optimizations. ARC stands for Automatic
11/// Reference Counting and is a system for managing reference counts for objects
12/// in Objective C.
13///
14/// The optimizations performed include elimination of redundant, partially
15/// redundant, and inconsequential reference count operations, elimination of
16/// redundant weak pointer operations, pattern-matching and replacement of
17/// low-level operations into higher-level operations, and numerous minor
18/// simplifications.
19///
20/// This file also defines a simple ARC-aware AliasAnalysis.
21///
22/// WARNING: This file knows about certain library functions. It recognizes them
23/// by name, and hardwires knowledge of their semantics.
24///
25/// WARNING: This file knows about how certain Objective-C library functions are
26/// used. Naive LLVM IR transformations which would otherwise be
27/// behavior-preserving may break these assumptions.
28///
John McCalld935e9c2011-06-15 23:37:01 +000029//===----------------------------------------------------------------------===//
30
Michael Gottesman08904e32013-01-28 03:28:38 +000031#define DEBUG_TYPE "objc-arc-opts"
32#include "ObjCARC.h"
Michael Gottesman294e7da2013-01-28 05:51:54 +000033#include "ObjCARCAliasAnalysis.h"
Michael Gottesman778138e2013-01-29 03:03:03 +000034#include "ProvenanceAnalysis.h"
35#include "DependencyAnalysis.h"
Michael Gottesmanfa0939f2013-01-28 04:12:07 +000036
John McCalld935e9c2011-06-15 23:37:01 +000037#include "llvm/ADT/DenseMap.h"
Michael Gottesmanfa0939f2013-01-28 04:12:07 +000038#include "llvm/ADT/STLExtras.h"
Michael Gottesman778138e2013-01-29 03:03:03 +000039#include "llvm/ADT/SmallPtrSet.h"
40#include "llvm/Support/CFG.h"
Michael Gottesmanfa0939f2013-01-28 04:12:07 +000041
John McCalld935e9c2011-06-15 23:37:01 +000042using namespace llvm;
Michael Gottesman08904e32013-01-28 03:28:38 +000043using namespace llvm::objcarc;
John McCalld935e9c2011-06-15 23:37:01 +000044
Michael Gottesman97e3df02013-01-14 00:35:14 +000045/// \defgroup MiscUtils Miscellaneous utilities that are not ARC specific.
46/// @{
John McCalld935e9c2011-06-15 23:37:01 +000047
48namespace {
Michael Gottesman97e3df02013-01-14 00:35:14 +000049 /// \brief An associative container with fast insertion-order (deterministic)
50 /// iteration over its elements. Plus the special blot operation.
John McCalld935e9c2011-06-15 23:37:01 +000051 template<class KeyT, class ValueT>
52 class MapVector {
Michael Gottesman97e3df02013-01-14 00:35:14 +000053 /// Map keys to indices in Vector.
John McCalld935e9c2011-06-15 23:37:01 +000054 typedef DenseMap<KeyT, size_t> MapTy;
55 MapTy Map;
56
John McCalld935e9c2011-06-15 23:37:01 +000057 typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
Michael Gottesman97e3df02013-01-14 00:35:14 +000058 /// Keys and values.
John McCalld935e9c2011-06-15 23:37:01 +000059 VectorTy Vector;
60
61 public:
62 typedef typename VectorTy::iterator iterator;
63 typedef typename VectorTy::const_iterator const_iterator;
64 iterator begin() { return Vector.begin(); }
65 iterator end() { return Vector.end(); }
66 const_iterator begin() const { return Vector.begin(); }
67 const_iterator end() const { return Vector.end(); }
68
69#ifdef XDEBUG
70 ~MapVector() {
71 assert(Vector.size() >= Map.size()); // May differ due to blotting.
72 for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
73 I != E; ++I) {
74 assert(I->second < Vector.size());
75 assert(Vector[I->second].first == I->first);
76 }
77 for (typename VectorTy::const_iterator I = Vector.begin(),
78 E = Vector.end(); I != E; ++I)
79 assert(!I->first ||
80 (Map.count(I->first) &&
81 Map[I->first] == size_t(I - Vector.begin())));
82 }
83#endif
84
Dan Gohman55b06742012-03-02 01:13:53 +000085 ValueT &operator[](const KeyT &Arg) {
John McCalld935e9c2011-06-15 23:37:01 +000086 std::pair<typename MapTy::iterator, bool> Pair =
87 Map.insert(std::make_pair(Arg, size_t(0)));
88 if (Pair.second) {
Dan Gohman55b06742012-03-02 01:13:53 +000089 size_t Num = Vector.size();
90 Pair.first->second = Num;
John McCalld935e9c2011-06-15 23:37:01 +000091 Vector.push_back(std::make_pair(Arg, ValueT()));
Dan Gohman55b06742012-03-02 01:13:53 +000092 return Vector[Num].second;
John McCalld935e9c2011-06-15 23:37:01 +000093 }
94 return Vector[Pair.first->second].second;
95 }
96
97 std::pair<iterator, bool>
98 insert(const std::pair<KeyT, ValueT> &InsertPair) {
99 std::pair<typename MapTy::iterator, bool> Pair =
100 Map.insert(std::make_pair(InsertPair.first, size_t(0)));
101 if (Pair.second) {
Dan Gohman55b06742012-03-02 01:13:53 +0000102 size_t Num = Vector.size();
103 Pair.first->second = Num;
John McCalld935e9c2011-06-15 23:37:01 +0000104 Vector.push_back(InsertPair);
Dan Gohman55b06742012-03-02 01:13:53 +0000105 return std::make_pair(Vector.begin() + Num, true);
John McCalld935e9c2011-06-15 23:37:01 +0000106 }
107 return std::make_pair(Vector.begin() + Pair.first->second, false);
108 }
109
Dan Gohman55b06742012-03-02 01:13:53 +0000110 const_iterator find(const KeyT &Key) const {
John McCalld935e9c2011-06-15 23:37:01 +0000111 typename MapTy::const_iterator It = Map.find(Key);
112 if (It == Map.end()) return Vector.end();
113 return Vector.begin() + It->second;
114 }
115
Michael Gottesman97e3df02013-01-14 00:35:14 +0000116 /// This is similar to erase, but instead of removing the element from the
117 /// vector, it just zeros out the key in the vector. This leaves iterators
118 /// intact, but clients must be prepared for zeroed-out keys when iterating.
Dan Gohman55b06742012-03-02 01:13:53 +0000119 void blot(const KeyT &Key) {
John McCalld935e9c2011-06-15 23:37:01 +0000120 typename MapTy::iterator It = Map.find(Key);
121 if (It == Map.end()) return;
122 Vector[It->second].first = KeyT();
123 Map.erase(It);
124 }
125
126 void clear() {
127 Map.clear();
128 Vector.clear();
129 }
130 };
131}
132
Michael Gottesman97e3df02013-01-14 00:35:14 +0000133/// @}
134///
135/// \defgroup ARCUtilities Utility declarations/definitions specific to ARC.
136/// @{
John McCalld935e9c2011-06-15 23:37:01 +0000137
Michael Gottesman97e3df02013-01-14 00:35:14 +0000138/// \brief This is similar to StripPointerCastsAndObjCCalls but it stops as soon
139/// as it finds a value with multiple uses.
John McCalld935e9c2011-06-15 23:37:01 +0000140static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
141 if (Arg->hasOneUse()) {
142 if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
143 return FindSingleUseIdentifiedObject(BC->getOperand(0));
144 if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
145 if (GEP->hasAllZeroIndices())
146 return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
147 if (IsForwarding(GetBasicInstructionClass(Arg)))
148 return FindSingleUseIdentifiedObject(
149 cast<CallInst>(Arg)->getArgOperand(0));
150 if (!IsObjCIdentifiedObject(Arg))
151 return 0;
152 return Arg;
153 }
154
Dan Gohman41375a32012-05-08 23:39:44 +0000155 // If we found an identifiable object but it has multiple uses, but they are
156 // trivial uses, we can still consider this to be a single-use value.
John McCalld935e9c2011-06-15 23:37:01 +0000157 if (IsObjCIdentifiedObject(Arg)) {
158 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
159 UI != UE; ++UI) {
160 const User *U = *UI;
161 if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
162 return 0;
163 }
164
165 return Arg;
166 }
167
168 return 0;
169}
170
Michael Gottesman4385edf2013-01-14 01:47:53 +0000171/// \brief Test whether the given pointer, which is an Objective C block
172/// pointer, does not "escape".
Michael Gottesman97e3df02013-01-14 00:35:14 +0000173///
174/// This differs from regular escape analysis in that a use as an
175/// argument to a call is not considered an escape.
176///
Dan Gohman728db492012-01-13 00:39:07 +0000177static bool DoesObjCBlockEscape(const Value *BlockPtr) {
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000178
179 DEBUG(dbgs() << "DoesObjCBlockEscape: Target: " << *BlockPtr << "\n");
180
Dan Gohman728db492012-01-13 00:39:07 +0000181 // Walk the def-use chains.
182 SmallVector<const Value *, 4> Worklist;
183 Worklist.push_back(BlockPtr);
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000184
185 // Ensure we do not visit any value twice.
186 SmallPtrSet<const Value *, 4> VisitedSet;
187
Dan Gohman728db492012-01-13 00:39:07 +0000188 do {
189 const Value *V = Worklist.pop_back_val();
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000190
191 DEBUG(dbgs() << "DoesObjCBlockEscape: Visiting: " << *V << "\n");
192
Dan Gohman728db492012-01-13 00:39:07 +0000193 for (Value::const_use_iterator UI = V->use_begin(), UE = V->use_end();
194 UI != UE; ++UI) {
195 const User *UUser = *UI;
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000196
197 DEBUG(dbgs() << "DoesObjCBlockEscape: User: " << *UUser << "\n");
198
Dan Gohman728db492012-01-13 00:39:07 +0000199 // Special - Use by a call (callee or argument) is not considered
200 // to be an escape.
Dan Gohmane1e352a2012-04-13 18:28:58 +0000201 switch (GetBasicInstructionClass(UUser)) {
202 case IC_StoreWeak:
203 case IC_InitWeak:
204 case IC_StoreStrong:
205 case IC_Autorelease:
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000206 case IC_AutoreleaseRV: {
207 DEBUG(dbgs() << "DoesObjCBlockEscape: User copies pointer arguments. "
208 "Block Escapes!\n");
Dan Gohmane1e352a2012-04-13 18:28:58 +0000209 // These special functions make copies of their pointer arguments.
210 return true;
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000211 }
Dan Gohmane1e352a2012-04-13 18:28:58 +0000212 case IC_User:
213 case IC_None:
214 // Use by an instruction which copies the value is an escape if the
215 // result is an escape.
216 if (isa<BitCastInst>(UUser) || isa<GetElementPtrInst>(UUser) ||
217 isa<PHINode>(UUser) || isa<SelectInst>(UUser)) {
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000218
Michael Gottesmane9145d32013-01-14 19:18:39 +0000219 if (!VisitedSet.insert(UUser)) {
Michael Gottesman4385edf2013-01-14 01:47:53 +0000220 DEBUG(dbgs() << "DoesObjCBlockEscape: User copies value. Escapes "
221 "if result escapes. Adding to list.\n");
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000222 Worklist.push_back(UUser);
223 } else {
224 DEBUG(dbgs() << "DoesObjCBlockEscape: Already visited node.\n");
225 }
Dan Gohmane1e352a2012-04-13 18:28:58 +0000226 continue;
227 }
228 // Use by a load is not an escape.
229 if (isa<LoadInst>(UUser))
230 continue;
231 // Use by a store is not an escape if the use is the address.
232 if (const StoreInst *SI = dyn_cast<StoreInst>(UUser))
233 if (V != SI->getValueOperand())
234 continue;
235 break;
236 default:
237 // Regular calls and other stuff are not considered escapes.
Dan Gohman728db492012-01-13 00:39:07 +0000238 continue;
239 }
Dan Gohmaneb6e0152012-02-13 22:57:02 +0000240 // Otherwise, conservatively assume an escape.
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000241 DEBUG(dbgs() << "DoesObjCBlockEscape: Assuming block escapes.\n");
Dan Gohman728db492012-01-13 00:39:07 +0000242 return true;
243 }
244 } while (!Worklist.empty());
245
246 // No escapes found.
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000247 DEBUG(dbgs() << "DoesObjCBlockEscape: Block does not escape.\n");
Dan Gohman728db492012-01-13 00:39:07 +0000248 return false;
249}
250
Michael Gottesman97e3df02013-01-14 00:35:14 +0000251/// @}
252///
Michael Gottesman97e3df02013-01-14 00:35:14 +0000253/// \defgroup ARCOpt ARC Optimization.
254/// @{
John McCalld935e9c2011-06-15 23:37:01 +0000255
256// TODO: On code like this:
257//
258// objc_retain(%x)
259// stuff_that_cannot_release()
260// objc_autorelease(%x)
261// stuff_that_cannot_release()
262// objc_retain(%x)
263// stuff_that_cannot_release()
264// objc_autorelease(%x)
265//
266// The second retain and autorelease can be deleted.
267
268// TODO: It should be possible to delete
269// objc_autoreleasePoolPush and objc_autoreleasePoolPop
270// pairs if nothing is actually autoreleased between them. Also, autorelease
271// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
272// after inlining) can be turned into plain release calls.
273
// TODO: Critical-edge splitting. If the optimal insertion point is
275// a critical edge, the current algorithm has to fail, because it doesn't
276// know how to split edges. It should be possible to make the optimizer
277// think in terms of edges, rather than blocks, and then split critical
278// edges on demand.
279
280// TODO: OptimizeSequences could generalized to be Interprocedural.
281
282// TODO: Recognize that a bunch of other objc runtime calls have
283// non-escaping arguments and non-releasing arguments, and may be
284// non-autoreleasing.
285
286// TODO: Sink autorelease calls as far as possible. Unfortunately we
287// usually can't sink them past other calls, which would be the main
288// case where it would be useful.
289
Dan Gohmanb3894012011-08-19 00:26:36 +0000290// TODO: The pointer returned from objc_loadWeakRetained is retained.
291
292// TODO: Delete release+retain pairs (rare).
Dan Gohmanceaac7c2011-06-20 23:20:43 +0000293
Chandler Carruthed0881b2012-12-03 16:50:05 +0000294#include "llvm/ADT/SmallPtrSet.h"
295#include "llvm/ADT/Statistic.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +0000296#include "llvm/IR/LLVMContext.h"
John McCalld935e9c2011-06-15 23:37:01 +0000297
298STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
299STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
300STATISTIC(NumAutoreleases,"Number of autoreleases converted to releases");
301STATISTIC(NumRets, "Number of return value forwarding "
302 "retain+autoreleaes eliminated");
303STATISTIC(NumRRs, "Number of retain+release paths eliminated");
304STATISTIC(NumPeeps, "Number of calls peephole-optimized");
305
namespace {
  /// \enum Sequence
  ///
  /// \brief A sequence of states that a pointer may go through in which an
  /// objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
    S_CanRelease,     ///< foo(x) -- x could possibly see a ref count decrement
    S_Use,            ///< any use of x
    S_Stop,           ///< like S_Release, but code motion is stopped
    S_Release,        ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
}

/// Combine two per-pointer sequence states at a CFG merge point, for either
/// the top-down or bottom-up dataflow direction.
static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // Identical states merge trivially to themselves, and S_None absorbs
  // everything.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  // Canonicalize so Lo is the state that appears earlier in the enum.
  Sequence Lo = A, Hi = B;
  if (Lo > Hi) {
    Lo = B;
    Hi = A;
  }

  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((Lo == S_Retain || Lo == S_CanRelease) &&
        (Hi == S_CanRelease || Hi == S_Use))
      return Hi;
  } else {
    // Choose the side which is further along in the sequence.
    if ((Lo == S_Use || Lo == S_CanRelease) &&
        (Hi == S_Use || Hi == S_Release || Hi == S_Stop ||
         Hi == S_MovableRelease))
      return Lo;
    // If both sides are releases, choose the more conservative one.
    if (Lo == S_Stop && (Hi == S_Release || Hi == S_MovableRelease))
      return Lo;
    if (Lo == S_Release && Hi == S_MovableRelease)
      return Lo;
  }

  // Any other combination is conflicting; give up on the sequence.
  return S_None;
}
349
namespace {
  /// \brief Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// True if the Calls are objc_retainBlock calls (as opposed to objc_retain
    /// calls).
    bool IsRetainBlock;

    /// True if the objc_release calls are all marked with the "tail" keyword.
    bool IsTailCallRelease;

    /// If the Calls are objc_release calls and they all have a
    /// clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// The set of optimal insert positions for moving calls in the opposite
    /// sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownSafe(false), IsRetainBlock(false),
      IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    /// Reset all fields to their default-constructed state (see definition
    /// below).
    void clear();
  };
}
396
397void RRInfo::clear() {
Dan Gohmanb3894012011-08-19 00:26:36 +0000398 KnownSafe = false;
John McCalld935e9c2011-06-15 23:37:01 +0000399 IsRetainBlock = false;
400 IsTailCallRelease = false;
401 ReleaseMetadata = 0;
402 Calls.clear();
403 ReverseInsertPts.clear();
404}
405
namespace {
  /// \brief This class summarizes several per-pointer runtime properties which
  /// are propagated through the flow graph.
  class PtrState {
    /// True if the reference count is known to be incremented.
    bool KnownPositiveRefCount;

    /// True if we've seen an opportunity for partial RR elimination, such as
    /// pushing calls into a CFG triangle or into one side of a CFG diamond.
    bool Partial;

    /// The current position in the sequence. Stored in 8 bits to keep the
    /// object small.
    Sequence Seq : 8;

  public:
    /// Unidirectional information about the current sequence.
    ///
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : KnownPositiveRefCount(false), Partial(false),
                 Seq(S_None) {}

    /// Record that the reference count is now known to be positive.
    void SetKnownPositiveRefCount() {
      KnownPositiveRefCount = true;
    }

    /// Forget any knowledge about the reference count.
    void ClearRefCount() {
      KnownPositiveRefCount = false;
    }

    /// Whether the reference count is known to have been incremented.
    bool IsKnownIncremented() const {
      return KnownPositiveRefCount;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }

    Sequence GetSeq() const {
      return Seq;
    }

    /// Abandon the current sequence entirely (resets to S_None).
    void ClearSequenceProgress() {
      ResetSequenceProgress(S_None);
    }

    /// Restart sequence tracking at NewSeq, dropping any partial-merge flag
    /// and all accumulated RRInfo.
    void ResetSequenceProgress(Sequence NewSeq) {
      Seq = NewSeq;
      Partial = false;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);
  };
}
462
/// Merge another block's per-pointer state into this one at a CFG merge
/// point, in the given dataflow direction. The result is conservative: any
/// disagreement between the two states weakens what we claim to know.
void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  // The count is only known positive if both paths knew it positive.
  KnownPositiveRefCount = KnownPositiveRefCount && Other.KnownPositiveRefCount;

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  // If we're not in a sequence (anymore), drop all associated state.
  if (Seq == S_None) {
    Partial = false;
    RRI.clear();
  } else if (Partial || Other.Partial) {
    // If we're doing a merge on a path that's previously seen a partial
    // merge, conservatively drop the sequence, to avoid doing partial
    // RR elimination. If the branch predicates for the two merge differ,
    // mixing them is unsafe.
    ClearSequenceProgress();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    // Both paths must agree for these properties to survive the merge.
    RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease &&
                            Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());

    // Merge the insert point sets. If there are any differences,
    // that makes this a partial merge.
    Partial = RRI.ReverseInsertPts.size() != Other.RRI.ReverseInsertPts.size();
    for (SmallPtrSet<Instruction *, 2>::const_iterator
         I = Other.RRI.ReverseInsertPts.begin(),
         E = Other.RRI.ReverseInsertPts.end(); I != E; ++I)
      // insert() returns true when Other had an insert point we lacked,
      // which also marks this as a partial merge.
      Partial |= RRI.ReverseInsertPts.insert(*I);
  }
}
501
namespace {
  /// \brief Per-BasicBlock state.
  class BBState {
    /// The number of unique control paths from the entry which can reach this
    /// block.
    unsigned TopDownPathCount;

    /// The number of unique control paths to exits from this block.
    unsigned BottomUpPathCount;

    /// A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// The top-down traversal uses this to record information known about a
    /// pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// The bottom-up traversal uses this to record information known about a
    /// pointer at the top of each block.
    MapTy PerPtrBottomUp;

    /// Effective predecessors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Preds;
    /// Effective successors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Succs;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// Mark this block as being an entry block, which has one path from the
    /// entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// Mark this block as being an exit block, which has one path to an exit by
    /// definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    /// Get (creating a default entry if absent, via MapVector::operator[])
    /// the top-down state for Arg.
    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    /// Get (creating a default entry if absent) the bottom-up state for Arg.
    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// Return the number of possible unique paths from an entry to an exit
    /// which pass through this block. This is only valid after both the
    /// top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      assert(TopDownPathCount != 0);
      assert(BottomUpPathCount != 0);
      return TopDownPathCount * BottomUpPathCount;
    }

    // Specialized CFG utilities.
    typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
    edge_iterator pred_begin() { return Preds.begin(); }
    edge_iterator pred_end() { return Preds.end(); }
    edge_iterator succ_begin() { return Succs.begin(); }
    edge_iterator succ_end() { return Succs.end(); }

    void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
    void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

    /// A block with no effective successors is an exit for this analysis.
    bool isExit() const { return Succs.empty(); }
  };
}
605
606void BBState::InitFromPred(const BBState &Other) {
607 PerPtrTopDown = Other.PerPtrTopDown;
608 TopDownPathCount = Other.TopDownPathCount;
609}
610
611void BBState::InitFromSucc(const BBState &Other) {
612 PerPtrBottomUp = Other.PerPtrBottomUp;
613 BottomUpPathCount = Other.BottomUpPathCount;
614}
615
Michael Gottesman97e3df02013-01-14 00:35:14 +0000616/// The top-down traversal uses this to merge information about predecessors to
617/// form the initial state for a new block.
John McCalld935e9c2011-06-15 23:37:01 +0000618void BBState::MergePred(const BBState &Other) {
619 // Other.TopDownPathCount can be 0, in which case it is either dead or a
620 // loop backedge. Loop backedges are special.
621 TopDownPathCount += Other.TopDownPathCount;
622
Michael Gottesman4385edf2013-01-14 01:47:53 +0000623 // Check for overflow. If we have overflow, fall back to conservative
624 // behavior.
Dan Gohman7c84dad2012-09-12 20:45:17 +0000625 if (TopDownPathCount < Other.TopDownPathCount) {
626 clearTopDownPointers();
627 return;
628 }
629
John McCalld935e9c2011-06-15 23:37:01 +0000630 // For each entry in the other set, if our set has an entry with the same key,
631 // merge the entries. Otherwise, copy the entry and merge it with an empty
632 // entry.
633 for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
634 ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
635 std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
636 Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
637 /*TopDown=*/true);
638 }
639
Dan Gohman7e315fc32011-08-11 21:06:32 +0000640 // For each entry in our set, if the other set doesn't have an entry with the
John McCalld935e9c2011-06-15 23:37:01 +0000641 // same key, force it to merge with an empty entry.
642 for (ptr_iterator MI = top_down_ptr_begin(),
643 ME = top_down_ptr_end(); MI != ME; ++MI)
644 if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
645 MI->second.Merge(PtrState(), /*TopDown=*/true);
646}
647
/// The bottom-up traversal uses this to merge information about successors to
/// form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior. (Unsigned addition wrapped iff the sum is now smaller than
  // one of its addends.)
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    clearBottomUpPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry. Note that insert() returns the existing
  // entry (and false) when the key is already present, so Pair.second
  // distinguishes the copied-fresh case from the must-merge case.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}
679
namespace {
  /// \brief The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    /// Set to true whenever this pass changes the IR of the current function.
    bool Changed;
    ProvenanceAnalysis PA;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// Declarations for ObjC runtime functions, for use in creating calls to
    /// them. These are initialized lazily to avoid cluttering up the Module
    /// with unused declarations.

    /// Declaration for ObjC runtime function
    /// objc_retainAutoreleasedReturnValue.
    Constant *RetainRVCallee;
    /// Declaration for ObjC runtime function objc_autoreleaseReturnValue.
    Constant *AutoreleaseRVCallee;
    /// Declaration for ObjC runtime function objc_release.
    Constant *ReleaseCallee;
    /// Declaration for ObjC runtime function objc_retain.
    Constant *RetainCallee;
    /// Declaration for ObjC runtime function objc_retainBlock.
    Constant *RetainBlockCallee;
    /// Declaration for ObjC runtime function objc_autorelease.
    Constant *AutoreleaseCallee;

    /// Flags which determine whether each of the interesting runtime functions
    /// is in fact used in the current function. Bit N corresponds to
    /// InstructionClass N (see the "UsedInThisFunction |= 1 << Class" updates
    /// in OptimizeIndividualCalls).
    unsigned UsedInThisFunction;

    /// The Metadata Kind for clang.imprecise_release metadata.
    unsigned ImpreciseReleaseMDKind;

    /// The Metadata Kind for clang.arc.copy_on_escape metadata.
    unsigned CopyOnEscapeMDKind;

    /// The Metadata Kind for clang.arc.no_objc_arc_exceptions metadata.
    unsigned NoObjCARCExceptionsMDKind;

    // Lazy accessors for the runtime function declarations above; each
    // creates the declaration on first use.
    Constant *getRetainRVCallee(Module *M);
    Constant *getAutoreleaseRVCallee(Module *M);
    Constant *getReleaseCallee(Module *M);
    Constant *getRetainCallee(Module *M);
    Constant *getRetainBlockCallee(Module *M);
    Constant *getAutoreleaseCallee(Module *M);

    bool IsRetainBlockOptimizable(const Instruction *Inst);

    // Peephole transformations applied one call at a time.
    void OptimizeRetainCall(Function &F, Instruction *Retain);
    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                   InstructionClass &Class);
    void OptimizeIndividualCalls(Function &F);

    // Dataflow analysis: bottom-up and top-down traversals that pair
    // retains with releases.
    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitInstructionBottomUp(Instruction *Inst,
                                  BasicBlock *BB,
                                  MapVector<Value *, RRInfo> &Retains,
                                  BBState &MyStates);
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitInstructionTopDown(Instruction *Inst,
                                 DenseMap<Value *, RRInfo> &Releases,
                                 BBState &MyStates);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState> &BBStates,
                               MapVector<Value *, RRInfo> &Retains,
                               DenseMap<Value *, RRInfo> &Releases,
                               Module *M,
                               SmallVector<Instruction *, 4> &NewRetains,
                               SmallVector<Instruction *, 4> &NewReleases,
                               SmallVector<Instruction *, 8> &DeadInsts,
                               RRInfo &RetainsToMove,
                               RRInfo &ReleasesToMove,
                               Value *Arg,
                               bool KnownSafe,
                               bool &AnyPairsCompletelyEliminated);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);
    virtual void releaseMemory();

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}
798
// Pass registration: gives the pass its command-line name ("objc-arc") and
// records its dependency on the ObjCARCAliasAnalysis pass.
char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)
805
806Pass *llvm::createObjCARCOptPass() {
807 return new ObjCARCOpt();
808}
809
/// Declare the analyses this pass consumes and what it preserves.
void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  // Both the ARC-aware alias analysis and the base AliasAnalysis interface
  // are required (PA and FindDependencies consume alias queries).
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}
816
Dan Gohman728db492012-01-13 00:39:07 +0000817bool ObjCARCOpt::IsRetainBlockOptimizable(const Instruction *Inst) {
818 // Without the magic metadata tag, we have to assume this might be an
819 // objc_retainBlock call inserted to convert a block pointer to an id,
820 // in which case it really is needed.
821 if (!Inst->getMetadata(CopyOnEscapeMDKind))
822 return false;
823
824 // If the pointer "escapes" (not including being used in a call),
825 // the copy may be needed.
826 if (DoesObjCBlockEscape(Inst))
827 return false;
828
829 // Otherwise, it's not needed.
830 return true;
831}
832
John McCalld935e9c2011-06-15 23:37:01 +0000833Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
834 if (!RetainRVCallee) {
835 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +0000836 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +0000837 Type *Params[] = { I8X };
838 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000839 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +0000840 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
841 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +0000842 RetainRVCallee =
843 M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000844 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +0000845 }
846 return RetainRVCallee;
847}
848
849Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
850 if (!AutoreleaseRVCallee) {
851 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +0000852 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +0000853 Type *Params[] = { I8X };
854 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000855 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +0000856 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
857 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +0000858 AutoreleaseRVCallee =
859 M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000860 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +0000861 }
862 return AutoreleaseRVCallee;
863}
864
865Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
866 if (!ReleaseCallee) {
867 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +0000868 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000869 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +0000870 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
871 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +0000872 ReleaseCallee =
873 M->getOrInsertFunction(
874 "objc_release",
875 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000876 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +0000877 }
878 return ReleaseCallee;
879}
880
881Constant *ObjCARCOpt::getRetainCallee(Module *M) {
882 if (!RetainCallee) {
883 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +0000884 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000885 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +0000886 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
887 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +0000888 RetainCallee =
889 M->getOrInsertFunction(
890 "objc_retain",
891 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000892 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +0000893 }
894 return RetainCallee;
895}
896
Dan Gohman6320f522011-07-22 22:29:21 +0000897Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
898 if (!RetainBlockCallee) {
899 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +0000900 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Dan Gohmanfca43c22011-09-14 18:33:34 +0000901 // objc_retainBlock is not nounwind because it calls user copy constructors
902 // which could theoretically throw.
Dan Gohman6320f522011-07-22 22:29:21 +0000903 RetainBlockCallee =
904 M->getOrInsertFunction(
905 "objc_retainBlock",
906 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendlinge94d8432012-12-07 23:16:57 +0000907 AttributeSet());
Dan Gohman6320f522011-07-22 22:29:21 +0000908 }
909 return RetainBlockCallee;
910}
911
John McCalld935e9c2011-06-15 23:37:01 +0000912Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
913 if (!AutoreleaseCallee) {
914 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +0000915 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000916 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +0000917 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
918 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +0000919 AutoreleaseCallee =
920 M->getOrInsertFunction(
921 "objc_autorelease",
922 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +0000923 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +0000924 }
925 return AutoreleaseCallee;
926}
927
/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value.
void
ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
  // The retain is only convertible if its argument is produced by a call
  // in the same basic block.
  ImmutableCallSite CS(GetObjCArg(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call) return;
  if (Call->getParent() != Retain->getParent()) return;

  // Check that the call is next to the retain (ignoring no-op instructions
  // between them); the RV optimization relies on this adjacency.
  BasicBlock::const_iterator I = Call;
  ++I;
  while (isNoopInstruction(I)) ++I;
  if (&*I != Retain)
    return;

  // Turn it to an objc_retainAutoreleasedReturnValue..
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainCall: Transforming "
                  "objc_retain => objc_retainAutoreleasedReturnValue"
                  " since the operand is a return value.\n"
                  " Old: "
               << *Retain << "\n");

  // Rewrite the callee in place; operands and attributes are unchanged.
  cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));

  DEBUG(dbgs() << " New: "
               << *Retain << "\n");
}
959
/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
/// not a return value. Or, if it can be paired with an
/// objc_autoreleaseReturnValue, delete the pair and return true.
bool
ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call or invoke.
  // In either of those cases the RetainRV really is acting on a return value,
  // so leave it alone.
  const Value *Arg = GetObjCArg(RetainRV);
  ImmutableCallSite CS(Arg);
  if (const Instruction *Call = CS.getInstruction()) {
    if (Call->getParent() == RetainRV->getParent()) {
      // Same-block case: the call must directly precede the RetainRV,
      // ignoring no-op instructions.
      BasicBlock::const_iterator I = Call;
      ++I;
      while (isNoopInstruction(I)) ++I;
      if (&*I == RetainRV)
        return false;
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      // Invoke case: the RetainRV must be the first non-no-op instruction
      // of the invoke's normal destination.
      BasicBlock *RetainRVParent = RetainRV->getParent();
      if (II->getNormalDest() == RetainRVParent) {
        BasicBlock::const_iterator I = RetainRVParent->begin();
        while (isNoopInstruction(I)) ++I;
        if (&*I == RetainRV)
          return false;
      }
    }
  }

  // Check for being preceded by an objc_autoreleaseReturnValue on the same
  // pointer. In this case, we can delete the pair.
  BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
  if (I != Begin) {
    // Walk backwards over no-op instructions to the previous real one.
    do --I; while (I != Begin && isNoopInstruction(I));
    if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
        GetObjCArg(I) == Arg) {
      Changed = true;
      ++NumPeeps;

      DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainRVCall: Erasing " << *I << "\n"
                   << " Erasing " << *RetainRV
                   << "\n");

      EraseInstruction(I);
      EraseInstruction(RetainRV);
      return true;
    }
  }

  // Turn it to a plain objc_retain.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainRVCall: Transforming "
                  "objc_retainAutoreleasedReturnValue => "
                  "objc_retain since the operand is not a return value.\n"
                  " Old: "
               << *RetainRV << "\n");

  cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));

  DEBUG(dbgs() << " New: "
               << *RetainRV << "\n");

  return false;
}
1023
/// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
/// used as a return value.
void
ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                      InstructionClass &Class) {
  // Check for a return of the pointer value. Walk the uses of the argument
  // (looking through bitcasts via a worklist); if any use is a ReturnInst or
  // an objc_retainAutoreleasedReturnValue, the RV form must be kept.
  const Value *Ptr = GetObjCArg(AutoreleaseRV);
  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
         UI != UE; ++UI) {
      const User *I = *UI;
      if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
        return;
      if (isa<BitCastInst>(I))
        Users.push_back(I);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeAutoreleaseRVCall: Transforming "
                  "objc_autoreleaseReturnValue => "
                  "objc_autorelease since its operand is not used as a return "
                  "value.\n"
                  " Old: "
               << *AutoreleaseRV << "\n");

  // Rewrite the callee in place, and update the caller's Class so later
  // peepholes in OptimizeIndividualCalls see the new classification.
  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
  AutoreleaseRVCI->
    setCalledFunction(getAutoreleaseCallee(F.getParent()));
  AutoreleaseRVCI->setTailCall(false); // Never tail call objc_autorelease.
  Class = IC_Autorelease;

  DEBUG(dbgs() << " New: "
               << *AutoreleaseRV << "\n");

}
1065
/// Visit each call, one at a time, and make simplifications without doing any
/// additional analysis.
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Visit all objc_* calls in F. The iterator is advanced before Inst is
  // processed because several cases below erase Inst.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    InstructionClass Class = GetBasicInstructionClass(Inst);

    DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Visiting: Class: "
          << Class << "; " << *Inst << "\n");

    switch (Class) {
    default: break;

    // Delete no-op casts. These function calls have special semantics, but
    // the semantics are entirely implemented via lowering in the front-end,
    // so by the time they reach the optimizer, they are just no-op calls
    // which return their argument.
    //
    // There are gray areas here, as the ability to cast reference-counted
    // pointers to raw void* and back allows code to break ARC assumptions,
    // however these are currently considered to be unimportant.
    case IC_NoopCast:
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Erasing no-op cast:"
                   " " << *Inst << "\n");
      EraseInstruction(Inst);
      continue;

    // If the pointer-to-weak-pointer is null, it's undefined behavior.
    case IC_StoreWeak:
    case IC_LoadWeak:
    case IC_LoadWeakRetained:
    case IC_InitWeak:
    case IC_DestroyWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (isNullOrUndef(CI->getArgOperand(0))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        // Exploit the UB: store undef through the null pointer and replace
        // the call's result with undef.
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);
        llvm::Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: A null "
                        "pointer-to-weak-pointer is undefined behavior.\n"
                        " Old = " << *CI <<
                        "\n New = " <<
                        *NewValue << "\n");
        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_CopyWeak:
    case IC_MoveWeak: {
      // Same as above, but these take two pointer-to-weak-pointer operands.
      CallInst *CI = cast<CallInst>(Inst);
      if (isNullOrUndef(CI->getArgOperand(0)) ||
          isNullOrUndef(CI->getArgOperand(1))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);

        llvm::Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: A null "
                        "pointer-to-weak-pointer is undefined behavior.\n"
                        " Old = " << *CI <<
                        "\n New = " <<
                        *NewValue << "\n");

        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case IC_Retain:
      OptimizeRetainCall(F, Inst);
      break;
    case IC_RetainRV:
      if (OptimizeRetainRVCall(F, Inst))
        continue;
      break;
    case IC_AutoreleaseRV:
      // May update Class (IC_AutoreleaseRV -> IC_Autorelease) in place.
      OptimizeAutoreleaseRVCall(F, Inst, Class);
      break;
    }

    // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
    if (IsAutorelease(Class) && Inst->use_empty()) {
      CallInst *Call = cast<CallInst>(Inst);
      const Value *Arg = Call->getArgOperand(0);
      Arg = FindSingleUseIdentifiedObject(Arg);
      if (Arg) {
        Changed = true;
        ++NumAutoreleases;

        // Create the declaration lazily. The new release is tagged with
        // clang.imprecise_release metadata since the autorelease it replaces
        // carried no precise-lifetime obligation.
        LLVMContext &C = Inst->getContext();
        CallInst *NewCall =
          CallInst::Create(getReleaseCallee(F.getParent()),
                           Call->getArgOperand(0), "", Call);
        NewCall->setMetadata(ImpreciseReleaseMDKind,
                             MDNode::get(C, ArrayRef<Value *>()));

        DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Replacing "
                        "objc_autorelease(x) with objc_release(x) since x is "
                        "otherwise unused.\n"
                        " Old: " << *Call <<
                        "\n New: " <<
                        *NewCall << "\n");

        EraseInstruction(Call);
        Inst = NewCall;
        Class = IC_Release;
      }
    }

    // For functions which can never be passed stack arguments, add
    // a tail keyword.
    if (IsAlwaysTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Adding tail keyword"
            " to function since it can never be passed stack args: " << *Inst <<
            "\n");
      cast<CallInst>(Inst)->setTailCall();
    }

    // Ensure that functions that can never have a "tail" keyword due to the
    // semantics of ARC truly do not do so.
    if (IsNeverTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Removing tail "
            "keyword from function: " << *Inst <<
            "\n");
      cast<CallInst>(Inst)->setTailCall(false);
    }

    // Set nounwind as needed.
    if (IsNoThrow(Class)) {
      Changed = true;
      DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Found no throw"
            " class. Setting nounwind on: " << *Inst << "\n");
      cast<CallInst>(Inst)->setDoesNotThrow();
    }

    // Only the null/PHI transformations below apply to calls that are
    // no-ops when passed null.
    if (!IsNoopOnNull(Class)) {
      UsedInThisFunction |= 1 << Class;
      continue;
    }

    const Value *Arg = GetObjCArg(Inst);

    // ARC calls with null are no-ops. Delete them.
    if (isNullOrUndef(Arg)) {
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: ARC calls with "
            " null are no-ops. Erasing: " << *Inst << "\n");
      EraseInstruction(Inst);
      continue;
    }

    // Keep track of which of retain, release, autorelease, and retain_block
    // are actually present in this function.
    UsedInThisFunction |= 1 << Class;

    // If Arg is a PHI, and one or more incoming values to the
    // PHI are null, and the call is control-equivalent to the PHI, and there
    // are no relevant side effects between the PHI and the call, the call
    // could be pushed up to just those paths with non-null incoming values.
    // For now, don't bother splitting critical edges for this.
    // The worklist allows the transformation to apply transitively to the
    // clones it creates.
    SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
    Worklist.push_back(std::make_pair(Inst, Arg));
    do {
      std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
      Inst = Pair.first;
      Arg = Pair.second;

      const PHINode *PN = dyn_cast<PHINode>(Arg);
      if (!PN) continue;

      // Determine if the PHI has any null operands, or any incoming
      // critical edges.
      bool HasNull = false;
      bool HasCriticalEdges = false;
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
        Value *Incoming =
          StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
        if (isNullOrUndef(Incoming))
          HasNull = true;
        else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
                   .getNumSuccessors() != 1) {
          HasCriticalEdges = true;
          break;
        }
      }
      // If we have null operands and no critical edges, optimize.
      if (!HasCriticalEdges && HasNull) {
        SmallPtrSet<Instruction *, 4> DependingInstructions;
        SmallPtrSet<const BasicBlock *, 4> Visited;

        // Check that there is nothing that cares about the reference
        // count between the call and the phi.
        switch (Class) {
        case IC_Retain:
        case IC_RetainBlock:
          // These can always be moved up.
          break;
        case IC_Release:
          // These can't be moved across things that care about the retain
          // count.
          FindDependencies(NeedsPositiveRetainCount, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case IC_Autorelease:
          // These can't be moved across autorelease pool scope boundaries.
          FindDependencies(AutoreleasePoolBoundary, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case IC_RetainRV:
        case IC_AutoreleaseRV:
          // Don't move these; the RV optimization depends on the autoreleaseRV
          // being tail called, and the retainRV being immediately after a call
          // (which might still happen if we get lucky with codegen layout, but
          // it's not worth taking the chance).
          continue;
        default:
          llvm_unreachable("Invalid dependence flavor");
        }

        // The call is movable only if its sole dependence is the PHI itself.
        if (DependingInstructions.size() == 1 &&
            *DependingInstructions.begin() == PN) {
          Changed = true;
          ++NumPartialNoops;
          // Clone the call into each predecessor that has a non-null value.
          CallInst *CInst = cast<CallInst>(Inst);
          Type *ParamTy = CInst->getArgOperand(0)->getType();
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
            Value *Incoming =
              StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
            if (!isNullOrUndef(Incoming)) {
              CallInst *Clone = cast<CallInst>(CInst->clone());
              Value *Op = PN->getIncomingValue(i);
              Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
              if (Op->getType() != ParamTy)
                Op = new BitCastInst(Op, ParamTy, "", InsertPos);
              Clone->setArgOperand(0, Op);
              Clone->insertBefore(InsertPos);

              DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Cloning "
                           << *CInst << "\n"
                           " And inserting "
                           "clone at " << *InsertPos << "\n");
              // Revisit each clone; its argument may itself be a PHI.
              Worklist.push_back(std::make_pair(Clone, Incoming));
            }
          }
          // Erase the original call.
          DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
          EraseInstruction(CInst);
          continue;
        }
      }
    } while (!Worklist.empty());
  }
  DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Finished List.\n");
}
1342
Michael Gottesman97e3df02013-01-14 00:35:14 +00001343/// Check for critical edges, loop boundaries, irreducible control flow, or
1344/// other CFG structures where moving code across the edge would result in it
1345/// being executed more.
John McCalld935e9c2011-06-15 23:37:01 +00001346void
1347ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
1348 DenseMap<const BasicBlock *, BBState> &BBStates,
1349 BBState &MyStates) const {
1350 // If any top-down local-use or possible-dec has a succ which is earlier in
1351 // the sequence, forget it.
Dan Gohman55b06742012-03-02 01:13:53 +00001352 for (BBState::ptr_iterator I = MyStates.top_down_ptr_begin(),
John McCalld935e9c2011-06-15 23:37:01 +00001353 E = MyStates.top_down_ptr_end(); I != E; ++I)
1354 switch (I->second.GetSeq()) {
1355 default: break;
1356 case S_Use: {
1357 const Value *Arg = I->first;
1358 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
1359 bool SomeSuccHasSame = false;
1360 bool AllSuccsHaveSame = true;
Dan Gohman55b06742012-03-02 01:13:53 +00001361 PtrState &S = I->second;
Dan Gohman0155f302012-02-17 18:59:53 +00001362 succ_const_iterator SI(TI), SE(TI, false);
1363
Dan Gohman0155f302012-02-17 18:59:53 +00001364 for (; SI != SE; ++SI) {
Dan Gohman362eb692012-03-02 01:26:46 +00001365 Sequence SuccSSeq = S_None;
1366 bool SuccSRRIKnownSafe = false;
Dan Gohman41375a32012-05-08 23:39:44 +00001367 // If VisitBottomUp has pointer information for this successor, take
1368 // what we know about it.
Dan Gohmandae33492012-04-27 18:56:31 +00001369 DenseMap<const BasicBlock *, BBState>::iterator BBI =
1370 BBStates.find(*SI);
1371 assert(BBI != BBStates.end());
1372 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
1373 SuccSSeq = SuccS.GetSeq();
1374 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
Dan Gohman362eb692012-03-02 01:26:46 +00001375 switch (SuccSSeq) {
John McCalld935e9c2011-06-15 23:37:01 +00001376 case S_None:
Dan Gohman12130272011-08-12 00:26:31 +00001377 case S_CanRelease: {
Dan Gohman362eb692012-03-02 01:26:46 +00001378 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
Dan Gohman12130272011-08-12 00:26:31 +00001379 S.ClearSequenceProgress();
Dan Gohman362eb692012-03-02 01:26:46 +00001380 break;
1381 }
Dan Gohman12130272011-08-12 00:26:31 +00001382 continue;
1383 }
John McCalld935e9c2011-06-15 23:37:01 +00001384 case S_Use:
1385 SomeSuccHasSame = true;
1386 break;
1387 case S_Stop:
1388 case S_Release:
1389 case S_MovableRelease:
Dan Gohman362eb692012-03-02 01:26:46 +00001390 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
Dan Gohman12130272011-08-12 00:26:31 +00001391 AllSuccsHaveSame = false;
John McCalld935e9c2011-06-15 23:37:01 +00001392 break;
1393 case S_Retain:
1394 llvm_unreachable("bottom-up pointer in retain state!");
1395 }
Dan Gohman12130272011-08-12 00:26:31 +00001396 }
John McCalld935e9c2011-06-15 23:37:01 +00001397 // If the state at the other end of any of the successor edges
1398 // matches the current state, require all edges to match. This
1399 // guards against loops in the middle of a sequence.
1400 if (SomeSuccHasSame && !AllSuccsHaveSame)
Dan Gohman12130272011-08-12 00:26:31 +00001401 S.ClearSequenceProgress();
Dan Gohman044437062011-12-12 18:13:53 +00001402 break;
John McCalld935e9c2011-06-15 23:37:01 +00001403 }
1404 case S_CanRelease: {
1405 const Value *Arg = I->first;
1406 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
1407 bool SomeSuccHasSame = false;
1408 bool AllSuccsHaveSame = true;
Dan Gohman55b06742012-03-02 01:13:53 +00001409 PtrState &S = I->second;
Dan Gohman0155f302012-02-17 18:59:53 +00001410 succ_const_iterator SI(TI), SE(TI, false);
1411
Dan Gohman0155f302012-02-17 18:59:53 +00001412 for (; SI != SE; ++SI) {
Dan Gohman362eb692012-03-02 01:26:46 +00001413 Sequence SuccSSeq = S_None;
1414 bool SuccSRRIKnownSafe = false;
Dan Gohman41375a32012-05-08 23:39:44 +00001415 // If VisitBottomUp has pointer information for this successor, take
1416 // what we know about it.
Dan Gohmandae33492012-04-27 18:56:31 +00001417 DenseMap<const BasicBlock *, BBState>::iterator BBI =
1418 BBStates.find(*SI);
1419 assert(BBI != BBStates.end());
1420 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
1421 SuccSSeq = SuccS.GetSeq();
1422 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
Dan Gohman362eb692012-03-02 01:26:46 +00001423 switch (SuccSSeq) {
Dan Gohman12130272011-08-12 00:26:31 +00001424 case S_None: {
Dan Gohman362eb692012-03-02 01:26:46 +00001425 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
Dan Gohman12130272011-08-12 00:26:31 +00001426 S.ClearSequenceProgress();
Dan Gohman362eb692012-03-02 01:26:46 +00001427 break;
1428 }
Dan Gohman12130272011-08-12 00:26:31 +00001429 continue;
1430 }
John McCalld935e9c2011-06-15 23:37:01 +00001431 case S_CanRelease:
1432 SomeSuccHasSame = true;
1433 break;
1434 case S_Stop:
1435 case S_Release:
1436 case S_MovableRelease:
1437 case S_Use:
Dan Gohman362eb692012-03-02 01:26:46 +00001438 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
Dan Gohman12130272011-08-12 00:26:31 +00001439 AllSuccsHaveSame = false;
John McCalld935e9c2011-06-15 23:37:01 +00001440 break;
1441 case S_Retain:
1442 llvm_unreachable("bottom-up pointer in retain state!");
1443 }
Dan Gohman12130272011-08-12 00:26:31 +00001444 }
John McCalld935e9c2011-06-15 23:37:01 +00001445 // If the state at the other end of any of the successor edges
1446 // matches the current state, require all edges to match. This
1447 // guards against loops in the middle of a sequence.
1448 if (SomeSuccHasSame && !AllSuccsHaveSame)
Dan Gohman12130272011-08-12 00:26:31 +00001449 S.ClearSequenceProgress();
Dan Gohman044437062011-12-12 18:13:53 +00001450 break;
John McCalld935e9c2011-06-15 23:37:01 +00001451 }
1452 }
1453}
1454
/// Apply the bottom-up dataflow transfer function for a single instruction.
///
/// \param Inst the instruction being visited. An invoke may be visited here
///        on behalf of a successor block (see VisitBottomUp), which is why
///        \p BB is passed separately.
/// \param BB the block the visit is attributed to; used for choosing an
///        insertion point when \p Inst is an invoke.
/// \param Retains receives candidate retains (keyed by the retain call)
///        together with their computed RRInfo.
/// \param MyStates the per-block bottom-up pointer states being updated.
/// \returns true if a nested retain+release pair was detected.
bool
ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
                                     BasicBlock *BB,
                                     MapVector<Value *, RRInfo> &Retains,
                                     BBState &MyStates) {
  bool NestingDetected = false;
  InstructionClass Class = GetInstructionClass(Inst);
  const Value *Arg = 0;

  switch (Class) {
  case IC_Release: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);

    // Check if we see two releases in a row on the same pointer. If so, make
    // a note, and we'll circle back to revisit it after we've
    // hopefully eliminated the second release, which may allow us to
    // eliminate the first release too.
    // Theoretically we could implement removal of nested retain+release
    // pairs by making PtrState hold a stack of states, but this is
    // simple and avoids adding overhead for the non-nested case.
    if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease) {
      DEBUG(dbgs() << "ObjCARCOpt::VisitInstructionBottomUp: Found nested "
                      "releases (i.e. a release pair)\n");
      NestingDetected = true;
    }

    // Releases tagged with clang.imprecise_release may be moved; start a
    // fresh sequence in the appropriate release state.
    MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
    S.ResetSequenceProgress(ReleaseMetadata ? S_MovableRelease : S_Release);
    S.RRI.ReleaseMetadata = ReleaseMetadata;
    S.RRI.KnownSafe = S.IsKnownIncremented();
    S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
    S.RRI.Calls.insert(Inst);

    S.SetKnownPositiveRefCount();
    break;
  }
  case IC_RetainBlock:
    // An objc_retainBlock call with just a use may need to be kept,
    // because it may be copying a block from the stack to the heap.
    if (!IsRetainBlockOptimizable(Inst))
      break;
    // FALLTHROUGH
  case IC_Retain:
  case IC_RetainRV: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);
    S.SetKnownPositiveRefCount();

    switch (S.GetSeq()) {
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
    case S_Use:
      S.RRI.ReverseInsertPts.clear();
      // FALL THROUGH
    case S_CanRelease:
      // Don't do retain+release tracking for IC_RetainRV, because it's
      // better to let it remain as the first instruction after a call.
      if (Class != IC_RetainRV) {
        S.RRI.IsRetainBlock = Class == IC_RetainBlock;
        Retains[Inst] = S.RRI;
      }
      S.ClearSequenceProgress();
      break;
    case S_None:
      break;
    case S_Retain:
      llvm_unreachable("bottom-up pointer in retain state!");
    }
    return NestingDetected;
  }
  case IC_AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case IC_AutoreleasepoolPush:
  case IC_None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
       ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    PtrState &S = MI->second;
    Sequence Seq = S.GetSeq();

    // Check for possible releases.
    if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
      S.ClearRefCount();
      switch (Seq) {
      case S_Use:
        S.SetSeq(S_CanRelease);
        continue;
      case S_CanRelease:
      case S_Release:
      case S_MovableRelease:
      case S_Stop:
      case S_None:
        break;
      case S_Retain:
        llvm_unreachable("bottom-up pointer in retain state!");
      }
    }

    // Check for possible direct uses.
    switch (Seq) {
    case S_Release:
    case S_MovableRelease:
      if (CanUse(Inst, Ptr, PA, Class)) {
        assert(S.RRI.ReverseInsertPts.empty());
        // If this is an invoke instruction, we're scanning it as part of
        // one of its successor blocks, since we can't insert code after it
        // in its own block, and we don't want to split critical edges.
        if (isa<InvokeInst>(Inst))
          S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
        else
          S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
        S.SetSeq(S_Use);
      } else if (Seq == S_Release &&
                 (Class == IC_User || Class == IC_CallOrUser)) {
        // Non-movable releases depend on any possible objc pointer use.
        S.SetSeq(S_Stop);
        assert(S.RRI.ReverseInsertPts.empty());
        // As above; handle invoke specially.
        if (isa<InvokeInst>(Inst))
          S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
        else
          S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
      }
      break;
    case S_Stop:
      if (CanUse(Inst, Ptr, PA, Class))
        S.SetSeq(S_Use);
      break;
    case S_CanRelease:
    case S_Use:
    case S_None:
      break;
    case S_Retain:
      llvm_unreachable("bottom-up pointer in retain state!");
    }
  }

  return NestingDetected;
}
1610
1611bool
John McCalld935e9c2011-06-15 23:37:01 +00001612ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
1613 DenseMap<const BasicBlock *, BBState> &BBStates,
1614 MapVector<Value *, RRInfo> &Retains) {
1615 bool NestingDetected = false;
1616 BBState &MyStates = BBStates[BB];
1617
1618 // Merge the states from each successor to compute the initial state
1619 // for the current block.
Dan Gohman10c82ce2012-08-27 18:31:36 +00001620 BBState::edge_iterator SI(MyStates.succ_begin()),
1621 SE(MyStates.succ_end());
1622 if (SI != SE) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001623 const BasicBlock *Succ = *SI;
1624 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
1625 assert(I != BBStates.end());
1626 MyStates.InitFromSucc(I->second);
1627 ++SI;
1628 for (; SI != SE; ++SI) {
1629 Succ = *SI;
1630 I = BBStates.find(Succ);
1631 assert(I != BBStates.end());
1632 MyStates.MergeSucc(I->second);
1633 }
Dan Gohman0155f302012-02-17 18:59:53 +00001634 }
John McCalld935e9c2011-06-15 23:37:01 +00001635
1636 // Visit all the instructions, bottom-up.
1637 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
1638 Instruction *Inst = llvm::prior(I);
Dan Gohman5c70fad2012-03-23 17:47:54 +00001639
1640 // Invoke instructions are visited as part of their successors (below).
1641 if (isa<InvokeInst>(Inst))
1642 continue;
1643
Michael Gottesmanaf2113f2013-01-13 07:00:51 +00001644 DEBUG(dbgs() << "ObjCARCOpt::VisitButtonUp: Visiting " << *Inst << "\n");
1645
Dan Gohman5c70fad2012-03-23 17:47:54 +00001646 NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
1647 }
1648
Dan Gohmandae33492012-04-27 18:56:31 +00001649 // If there's a predecessor with an invoke, visit the invoke as if it were
1650 // part of this block, since we can't insert code after an invoke in its own
1651 // block, and we don't want to split critical edges.
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001652 for (BBState::edge_iterator PI(MyStates.pred_begin()),
1653 PE(MyStates.pred_end()); PI != PE; ++PI) {
Dan Gohman5c70fad2012-03-23 17:47:54 +00001654 BasicBlock *Pred = *PI;
Dan Gohmandae33492012-04-27 18:56:31 +00001655 if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
1656 NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
Dan Gohman817a7c62012-03-22 18:24:56 +00001657 }
John McCalld935e9c2011-06-15 23:37:01 +00001658
Dan Gohman817a7c62012-03-22 18:24:56 +00001659 return NestingDetected;
1660}
John McCalld935e9c2011-06-15 23:37:01 +00001661
Dan Gohman817a7c62012-03-22 18:24:56 +00001662bool
1663ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
1664 DenseMap<Value *, RRInfo> &Releases,
1665 BBState &MyStates) {
1666 bool NestingDetected = false;
1667 InstructionClass Class = GetInstructionClass(Inst);
1668 const Value *Arg = 0;
John McCalld935e9c2011-06-15 23:37:01 +00001669
Dan Gohman817a7c62012-03-22 18:24:56 +00001670 switch (Class) {
1671 case IC_RetainBlock:
1672 // An objc_retainBlock call with just a use may need to be kept,
1673 // because it may be copying a block from the stack to the heap.
1674 if (!IsRetainBlockOptimizable(Inst))
1675 break;
1676 // FALLTHROUGH
1677 case IC_Retain:
1678 case IC_RetainRV: {
1679 Arg = GetObjCArg(Inst);
1680
1681 PtrState &S = MyStates.getPtrTopDownState(Arg);
1682
1683 // Don't do retain+release tracking for IC_RetainRV, because it's
1684 // better to let it remain as the first instruction after a call.
1685 if (Class != IC_RetainRV) {
1686 // If we see two retains in a row on the same pointer. If so, make
John McCalld935e9c2011-06-15 23:37:01 +00001687 // a note, and we'll cicle back to revisit it after we've
Dan Gohman817a7c62012-03-22 18:24:56 +00001688 // hopefully eliminated the second retain, which may allow us to
1689 // eliminate the first retain too.
John McCalld935e9c2011-06-15 23:37:01 +00001690 // Theoretically we could implement removal of nested retain+release
1691 // pairs by making PtrState hold a stack of states, but this is
1692 // simple and avoids adding overhead for the non-nested case.
Dan Gohman817a7c62012-03-22 18:24:56 +00001693 if (S.GetSeq() == S_Retain)
John McCalld935e9c2011-06-15 23:37:01 +00001694 NestingDetected = true;
1695
Dan Gohman62079b42012-04-25 00:50:46 +00001696 S.ResetSequenceProgress(S_Retain);
Dan Gohman817a7c62012-03-22 18:24:56 +00001697 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
Dan Gohmandf476e52012-09-04 23:16:20 +00001698 S.RRI.KnownSafe = S.IsKnownIncremented();
John McCalld935e9c2011-06-15 23:37:01 +00001699 S.RRI.Calls.insert(Inst);
John McCalld935e9c2011-06-15 23:37:01 +00001700 }
John McCalld935e9c2011-06-15 23:37:01 +00001701
Dan Gohmandf476e52012-09-04 23:16:20 +00001702 S.SetKnownPositiveRefCount();
Dan Gohmanf64ff8e2012-07-23 19:27:31 +00001703
1704 // A retain can be a potential use; procede to the generic checking
1705 // code below.
1706 break;
Dan Gohman817a7c62012-03-22 18:24:56 +00001707 }
1708 case IC_Release: {
1709 Arg = GetObjCArg(Inst);
1710
1711 PtrState &S = MyStates.getPtrTopDownState(Arg);
Dan Gohmandf476e52012-09-04 23:16:20 +00001712 S.ClearRefCount();
Dan Gohman817a7c62012-03-22 18:24:56 +00001713
1714 switch (S.GetSeq()) {
1715 case S_Retain:
1716 case S_CanRelease:
1717 S.RRI.ReverseInsertPts.clear();
1718 // FALL THROUGH
1719 case S_Use:
1720 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
1721 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
1722 Releases[Inst] = S.RRI;
1723 S.ClearSequenceProgress();
1724 break;
1725 case S_None:
1726 break;
1727 case S_Stop:
1728 case S_Release:
1729 case S_MovableRelease:
1730 llvm_unreachable("top-down pointer in release state!");
1731 }
1732 break;
1733 }
1734 case IC_AutoreleasepoolPop:
1735 // Conservatively, clear MyStates for all known pointers.
1736 MyStates.clearTopDownPointers();
1737 return NestingDetected;
1738 case IC_AutoreleasepoolPush:
1739 case IC_None:
1740 // These are irrelevant.
1741 return NestingDetected;
1742 default:
1743 break;
1744 }
1745
1746 // Consider any other possible effects of this instruction on each
1747 // pointer being tracked.
1748 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
1749 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
1750 const Value *Ptr = MI->first;
1751 if (Ptr == Arg)
1752 continue; // Handled above.
1753 PtrState &S = MI->second;
1754 Sequence Seq = S.GetSeq();
1755
1756 // Check for possible releases.
1757 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
Dan Gohman62079b42012-04-25 00:50:46 +00001758 S.ClearRefCount();
John McCalld935e9c2011-06-15 23:37:01 +00001759 switch (Seq) {
Dan Gohman817a7c62012-03-22 18:24:56 +00001760 case S_Retain:
1761 S.SetSeq(S_CanRelease);
1762 assert(S.RRI.ReverseInsertPts.empty());
1763 S.RRI.ReverseInsertPts.insert(Inst);
1764
1765 // One call can't cause a transition from S_Retain to S_CanRelease
1766 // and S_CanRelease to S_Use. If we've made the first transition,
1767 // we're done.
1768 continue;
John McCalld935e9c2011-06-15 23:37:01 +00001769 case S_Use:
Dan Gohman817a7c62012-03-22 18:24:56 +00001770 case S_CanRelease:
John McCalld935e9c2011-06-15 23:37:01 +00001771 case S_None:
1772 break;
Dan Gohman817a7c62012-03-22 18:24:56 +00001773 case S_Stop:
1774 case S_Release:
1775 case S_MovableRelease:
1776 llvm_unreachable("top-down pointer in release state!");
John McCalld935e9c2011-06-15 23:37:01 +00001777 }
1778 }
Dan Gohman817a7c62012-03-22 18:24:56 +00001779
1780 // Check for possible direct uses.
1781 switch (Seq) {
1782 case S_CanRelease:
1783 if (CanUse(Inst, Ptr, PA, Class))
1784 S.SetSeq(S_Use);
1785 break;
1786 case S_Retain:
1787 case S_Use:
1788 case S_None:
1789 break;
1790 case S_Stop:
1791 case S_Release:
1792 case S_MovableRelease:
1793 llvm_unreachable("top-down pointer in release state!");
1794 }
John McCalld935e9c2011-06-15 23:37:01 +00001795 }
1796
1797 return NestingDetected;
1798}
1799
1800bool
1801ObjCARCOpt::VisitTopDown(BasicBlock *BB,
1802 DenseMap<const BasicBlock *, BBState> &BBStates,
1803 DenseMap<Value *, RRInfo> &Releases) {
1804 bool NestingDetected = false;
1805 BBState &MyStates = BBStates[BB];
1806
1807 // Merge the states from each predecessor to compute the initial state
1808 // for the current block.
Dan Gohman10c82ce2012-08-27 18:31:36 +00001809 BBState::edge_iterator PI(MyStates.pred_begin()),
1810 PE(MyStates.pred_end());
1811 if (PI != PE) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001812 const BasicBlock *Pred = *PI;
1813 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
1814 assert(I != BBStates.end());
1815 MyStates.InitFromPred(I->second);
1816 ++PI;
1817 for (; PI != PE; ++PI) {
1818 Pred = *PI;
1819 I = BBStates.find(Pred);
1820 assert(I != BBStates.end());
1821 MyStates.MergePred(I->second);
1822 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001823 }
John McCalld935e9c2011-06-15 23:37:01 +00001824
1825 // Visit all the instructions, top-down.
1826 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
1827 Instruction *Inst = I;
Michael Gottesmanaf2113f2013-01-13 07:00:51 +00001828
1829 DEBUG(dbgs() << "ObjCARCOpt::VisitTopDown: Visiting " << *Inst << "\n");
1830
Dan Gohman817a7c62012-03-22 18:24:56 +00001831 NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
John McCalld935e9c2011-06-15 23:37:01 +00001832 }
1833
1834 CheckForCFGHazards(BB, BBStates, MyStates);
1835 return NestingDetected;
1836}
1837
Dan Gohmana53a12c2011-12-12 19:42:25 +00001838static void
1839ComputePostOrders(Function &F,
1840 SmallVectorImpl<BasicBlock *> &PostOrder,
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001841 SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
1842 unsigned NoObjCARCExceptionsMDKind,
1843 DenseMap<const BasicBlock *, BBState> &BBStates) {
Michael Gottesman97e3df02013-01-14 00:35:14 +00001844 /// The visited set, for doing DFS walks.
Dan Gohmana53a12c2011-12-12 19:42:25 +00001845 SmallPtrSet<BasicBlock *, 16> Visited;
1846
1847 // Do DFS, computing the PostOrder.
1848 SmallPtrSet<BasicBlock *, 16> OnStack;
1849 SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001850
1851 // Functions always have exactly one entry block, and we don't have
1852 // any other block that we treat like an entry block.
Dan Gohmana53a12c2011-12-12 19:42:25 +00001853 BasicBlock *EntryBB = &F.getEntryBlock();
Dan Gohman41375a32012-05-08 23:39:44 +00001854 BBState &MyStates = BBStates[EntryBB];
1855 MyStates.SetAsEntry();
1856 TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
1857 SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
Dan Gohmana53a12c2011-12-12 19:42:25 +00001858 Visited.insert(EntryBB);
1859 OnStack.insert(EntryBB);
1860 do {
1861 dfs_next_succ:
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001862 BasicBlock *CurrBB = SuccStack.back().first;
1863 TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
1864 succ_iterator SE(TI, false);
Dan Gohman41375a32012-05-08 23:39:44 +00001865
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001866 while (SuccStack.back().second != SE) {
1867 BasicBlock *SuccBB = *SuccStack.back().second++;
1868 if (Visited.insert(SuccBB)) {
Dan Gohman41375a32012-05-08 23:39:44 +00001869 TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
1870 SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001871 BBStates[CurrBB].addSucc(SuccBB);
Dan Gohman41375a32012-05-08 23:39:44 +00001872 BBState &SuccStates = BBStates[SuccBB];
1873 SuccStates.addPred(CurrBB);
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001874 OnStack.insert(SuccBB);
Dan Gohmana53a12c2011-12-12 19:42:25 +00001875 goto dfs_next_succ;
1876 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001877
1878 if (!OnStack.count(SuccBB)) {
1879 BBStates[CurrBB].addSucc(SuccBB);
1880 BBStates[SuccBB].addPred(CurrBB);
1881 }
Dan Gohmana53a12c2011-12-12 19:42:25 +00001882 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001883 OnStack.erase(CurrBB);
1884 PostOrder.push_back(CurrBB);
1885 SuccStack.pop_back();
Dan Gohmana53a12c2011-12-12 19:42:25 +00001886 } while (!SuccStack.empty());
1887
1888 Visited.clear();
1889
Dan Gohmana53a12c2011-12-12 19:42:25 +00001890 // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001891 // Functions may have many exits, and there also blocks which we treat
1892 // as exits due to ignored edges.
1893 SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
1894 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
1895 BasicBlock *ExitBB = I;
1896 BBState &MyStates = BBStates[ExitBB];
1897 if (!MyStates.isExit())
1898 continue;
1899
Dan Gohmandae33492012-04-27 18:56:31 +00001900 MyStates.SetAsExit();
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001901
1902 PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
Dan Gohmana53a12c2011-12-12 19:42:25 +00001903 Visited.insert(ExitBB);
1904 while (!PredStack.empty()) {
1905 reverse_dfs_next_succ:
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001906 BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
1907 while (PredStack.back().second != PE) {
Dan Gohmana53a12c2011-12-12 19:42:25 +00001908 BasicBlock *BB = *PredStack.back().second++;
Dan Gohmana53a12c2011-12-12 19:42:25 +00001909 if (Visited.insert(BB)) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001910 PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
Dan Gohmana53a12c2011-12-12 19:42:25 +00001911 goto reverse_dfs_next_succ;
1912 }
1913 }
1914 ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
1915 }
1916 }
1917}
1918
Michael Gottesman97e3df02013-01-14 00:35:14 +00001919// Visit the function both top-down and bottom-up.
John McCalld935e9c2011-06-15 23:37:01 +00001920bool
1921ObjCARCOpt::Visit(Function &F,
1922 DenseMap<const BasicBlock *, BBState> &BBStates,
1923 MapVector<Value *, RRInfo> &Retains,
1924 DenseMap<Value *, RRInfo> &Releases) {
Dan Gohmana53a12c2011-12-12 19:42:25 +00001925
1926 // Use reverse-postorder traversals, because we magically know that loops
1927 // will be well behaved, i.e. they won't repeatedly call retain on a single
1928 // pointer without doing a release. We can't use the ReversePostOrderTraversal
1929 // class here because we want the reverse-CFG postorder to consider each
1930 // function exit point, and we want to ignore selected cycle edges.
1931 SmallVector<BasicBlock *, 16> PostOrder;
1932 SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
Dan Gohmanc24c66f2012-04-24 22:53:18 +00001933 ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
1934 NoObjCARCExceptionsMDKind,
1935 BBStates);
Dan Gohmana53a12c2011-12-12 19:42:25 +00001936
1937 // Use reverse-postorder on the reverse CFG for bottom-up.
John McCalld935e9c2011-06-15 23:37:01 +00001938 bool BottomUpNestingDetected = false;
Dan Gohmanc57b58c2011-08-18 21:27:42 +00001939 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
Dan Gohmana53a12c2011-12-12 19:42:25 +00001940 ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
1941 I != E; ++I)
1942 BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);
John McCalld935e9c2011-06-15 23:37:01 +00001943
Dan Gohmana53a12c2011-12-12 19:42:25 +00001944 // Use reverse-postorder for top-down.
John McCalld935e9c2011-06-15 23:37:01 +00001945 bool TopDownNestingDetected = false;
Dan Gohmana53a12c2011-12-12 19:42:25 +00001946 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
1947 PostOrder.rbegin(), E = PostOrder.rend();
1948 I != E; ++I)
1949 TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);
John McCalld935e9c2011-06-15 23:37:01 +00001950
1951 return TopDownNestingDetected && BottomUpNestingDetected;
1952}
1953
Michael Gottesman97e3df02013-01-14 00:35:14 +00001954/// Move the calls in RetainsToMove and ReleasesToMove.
John McCalld935e9c2011-06-15 23:37:01 +00001955void ObjCARCOpt::MoveCalls(Value *Arg,
1956 RRInfo &RetainsToMove,
1957 RRInfo &ReleasesToMove,
1958 MapVector<Value *, RRInfo> &Retains,
1959 DenseMap<Value *, RRInfo> &Releases,
Dan Gohman6320f522011-07-22 22:29:21 +00001960 SmallVectorImpl<Instruction *> &DeadInsts,
1961 Module *M) {
Chris Lattner229907c2011-07-18 04:54:35 +00001962 Type *ArgTy = Arg->getType();
Dan Gohman6320f522011-07-22 22:29:21 +00001963 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
John McCalld935e9c2011-06-15 23:37:01 +00001964
1965 // Insert the new retain and release calls.
1966 for (SmallPtrSet<Instruction *, 2>::const_iterator
1967 PI = ReleasesToMove.ReverseInsertPts.begin(),
1968 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
1969 Instruction *InsertPt = *PI;
1970 Value *MyArg = ArgTy == ParamTy ? Arg :
1971 new BitCastInst(Arg, ParamTy, "", InsertPt);
1972 CallInst *Call =
1973 CallInst::Create(RetainsToMove.IsRetainBlock ?
Dan Gohman6320f522011-07-22 22:29:21 +00001974 getRetainBlockCallee(M) : getRetainCallee(M),
John McCalld935e9c2011-06-15 23:37:01 +00001975 MyArg, "", InsertPt);
1976 Call->setDoesNotThrow();
Dan Gohman728db492012-01-13 00:39:07 +00001977 if (RetainsToMove.IsRetainBlock)
Dan Gohmana7107f92011-10-17 22:53:25 +00001978 Call->setMetadata(CopyOnEscapeMDKind,
1979 MDNode::get(M->getContext(), ArrayRef<Value *>()));
Dan Gohman728db492012-01-13 00:39:07 +00001980 else
John McCalld935e9c2011-06-15 23:37:01 +00001981 Call->setTailCall();
Michael Gottesmanc189a392013-01-09 19:23:24 +00001982
1983 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Inserting new Release: " << *Call
1984 << "\n"
1985 " At insertion point: " << *InsertPt
1986 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00001987 }
1988 for (SmallPtrSet<Instruction *, 2>::const_iterator
1989 PI = RetainsToMove.ReverseInsertPts.begin(),
1990 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
Dan Gohman5c70fad2012-03-23 17:47:54 +00001991 Instruction *InsertPt = *PI;
1992 Value *MyArg = ArgTy == ParamTy ? Arg :
1993 new BitCastInst(Arg, ParamTy, "", InsertPt);
1994 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
1995 "", InsertPt);
1996 // Attach a clang.imprecise_release metadata tag, if appropriate.
1997 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
1998 Call->setMetadata(ImpreciseReleaseMDKind, M);
1999 Call->setDoesNotThrow();
2000 if (ReleasesToMove.IsTailCallRelease)
2001 Call->setTailCall();
Michael Gottesmanc189a392013-01-09 19:23:24 +00002002
2003 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Inserting new Retain: " << *Call
2004 << "\n"
2005 " At insertion point: " << *InsertPt
2006 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002007 }
2008
2009 // Delete the original retain and release calls.
2010 for (SmallPtrSet<Instruction *, 2>::const_iterator
2011 AI = RetainsToMove.Calls.begin(),
2012 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
2013 Instruction *OrigRetain = *AI;
2014 Retains.blot(OrigRetain);
2015 DeadInsts.push_back(OrigRetain);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002016 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Deleting retain: " << *OrigRetain <<
2017 "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002018 }
2019 for (SmallPtrSet<Instruction *, 2>::const_iterator
2020 AI = ReleasesToMove.Calls.begin(),
2021 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
2022 Instruction *OrigRelease = *AI;
2023 Releases.erase(OrigRelease);
2024 DeadInsts.push_back(OrigRelease);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002025 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Deleting release: " << *OrigRelease
2026 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002027 }
2028}
2029
Michael Gottesman9de6f962013-01-22 21:49:00 +00002030bool
2031ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
2032 &BBStates,
2033 MapVector<Value *, RRInfo> &Retains,
2034 DenseMap<Value *, RRInfo> &Releases,
2035 Module *M,
2036 SmallVector<Instruction *, 4> &NewRetains,
2037 SmallVector<Instruction *, 4> &NewReleases,
2038 SmallVector<Instruction *, 8> &DeadInsts,
2039 RRInfo &RetainsToMove,
2040 RRInfo &ReleasesToMove,
2041 Value *Arg,
2042 bool KnownSafe,
2043 bool &AnyPairsCompletelyEliminated) {
2044 // If a pair happens in a region where it is known that the reference count
2045 // is already incremented, we can similarly ignore possible decrements.
2046 bool KnownSafeTD = true, KnownSafeBU = true;
2047
2048 // Connect the dots between the top-down-collected RetainsToMove and
2049 // bottom-up-collected ReleasesToMove to form sets of related calls.
2050 // This is an iterative process so that we connect multiple releases
2051 // to multiple retains if needed.
2052 unsigned OldDelta = 0;
2053 unsigned NewDelta = 0;
2054 unsigned OldCount = 0;
2055 unsigned NewCount = 0;
2056 bool FirstRelease = true;
2057 bool FirstRetain = true;
2058 for (;;) {
2059 for (SmallVectorImpl<Instruction *>::const_iterator
2060 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
2061 Instruction *NewRetain = *NI;
2062 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
2063 assert(It != Retains.end());
2064 const RRInfo &NewRetainRRI = It->second;
2065 KnownSafeTD &= NewRetainRRI.KnownSafe;
2066 for (SmallPtrSet<Instruction *, 2>::const_iterator
2067 LI = NewRetainRRI.Calls.begin(),
2068 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
2069 Instruction *NewRetainRelease = *LI;
2070 DenseMap<Value *, RRInfo>::const_iterator Jt =
2071 Releases.find(NewRetainRelease);
2072 if (Jt == Releases.end())
2073 return false;
2074 const RRInfo &NewRetainReleaseRRI = Jt->second;
2075 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
2076 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
2077 OldDelta -=
2078 BBStates[NewRetainRelease->getParent()].GetAllPathCount();
2079
2080 // Merge the ReleaseMetadata and IsTailCallRelease values.
2081 if (FirstRelease) {
2082 ReleasesToMove.ReleaseMetadata =
2083 NewRetainReleaseRRI.ReleaseMetadata;
2084 ReleasesToMove.IsTailCallRelease =
2085 NewRetainReleaseRRI.IsTailCallRelease;
2086 FirstRelease = false;
2087 } else {
2088 if (ReleasesToMove.ReleaseMetadata !=
2089 NewRetainReleaseRRI.ReleaseMetadata)
2090 ReleasesToMove.ReleaseMetadata = 0;
2091 if (ReleasesToMove.IsTailCallRelease !=
2092 NewRetainReleaseRRI.IsTailCallRelease)
2093 ReleasesToMove.IsTailCallRelease = false;
2094 }
2095
2096 // Collect the optimal insertion points.
2097 if (!KnownSafe)
2098 for (SmallPtrSet<Instruction *, 2>::const_iterator
2099 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
2100 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
2101 RI != RE; ++RI) {
2102 Instruction *RIP = *RI;
2103 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
2104 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
2105 }
2106 NewReleases.push_back(NewRetainRelease);
2107 }
2108 }
2109 }
2110 NewRetains.clear();
2111 if (NewReleases.empty()) break;
2112
2113 // Back the other way.
2114 for (SmallVectorImpl<Instruction *>::const_iterator
2115 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
2116 Instruction *NewRelease = *NI;
2117 DenseMap<Value *, RRInfo>::const_iterator It =
2118 Releases.find(NewRelease);
2119 assert(It != Releases.end());
2120 const RRInfo &NewReleaseRRI = It->second;
2121 KnownSafeBU &= NewReleaseRRI.KnownSafe;
2122 for (SmallPtrSet<Instruction *, 2>::const_iterator
2123 LI = NewReleaseRRI.Calls.begin(),
2124 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
2125 Instruction *NewReleaseRetain = *LI;
2126 MapVector<Value *, RRInfo>::const_iterator Jt =
2127 Retains.find(NewReleaseRetain);
2128 if (Jt == Retains.end())
2129 return false;
2130 const RRInfo &NewReleaseRetainRRI = Jt->second;
2131 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
2132 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
2133 unsigned PathCount =
2134 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
2135 OldDelta += PathCount;
2136 OldCount += PathCount;
2137
2138 // Merge the IsRetainBlock values.
2139 if (FirstRetain) {
2140 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
2141 FirstRetain = false;
2142 } else if (ReleasesToMove.IsRetainBlock !=
2143 NewReleaseRetainRRI.IsRetainBlock)
2144 // It's not possible to merge the sequences if one uses
2145 // objc_retain and the other uses objc_retainBlock.
2146 return false;
2147
2148 // Collect the optimal insertion points.
2149 if (!KnownSafe)
2150 for (SmallPtrSet<Instruction *, 2>::const_iterator
2151 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
2152 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
2153 RI != RE; ++RI) {
2154 Instruction *RIP = *RI;
2155 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
2156 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
2157 NewDelta += PathCount;
2158 NewCount += PathCount;
2159 }
2160 }
2161 NewRetains.push_back(NewReleaseRetain);
2162 }
2163 }
2164 }
2165 NewReleases.clear();
2166 if (NewRetains.empty()) break;
2167 }
2168
2169 // If the pointer is known incremented or nested, we can safely delete the
2170 // pair regardless of what's between them.
2171 if (KnownSafeTD || KnownSafeBU) {
2172 RetainsToMove.ReverseInsertPts.clear();
2173 ReleasesToMove.ReverseInsertPts.clear();
2174 NewCount = 0;
2175 } else {
2176 // Determine whether the new insertion points we computed preserve the
2177 // balance of retain and release calls through the program.
2178 // TODO: If the fully aggressive solution isn't valid, try to find a
2179 // less aggressive solution which is.
2180 if (NewDelta != 0)
2181 return false;
2182 }
2183
2184 // Determine whether the original call points are balanced in the retain and
2185 // release calls through the program. If not, conservatively don't touch
2186 // them.
2187 // TODO: It's theoretically possible to do code motion in this case, as
2188 // long as the existing imbalances are maintained.
2189 if (OldDelta != 0)
2190 return false;
2191
2192 Changed = true;
2193 assert(OldCount != 0 && "Unreachable code?");
2194 NumRRs += OldCount - NewCount;
Michael Gottesman9de6f962013-01-22 21:49:00 +00002195 // Set to true if we completely removed any RR pairs.
Michael Gottesman8b5515f2013-01-22 21:53:43 +00002196 AnyPairsCompletelyEliminated = NewCount == 0;
Michael Gottesman9de6f962013-01-22 21:49:00 +00002197
2198 // We can move calls!
2199 return true;
2200}
2201
Michael Gottesman97e3df02013-01-14 00:35:14 +00002202/// Identify pairings between the retains and releases, and delete and/or move
2203/// them.
John McCalld935e9c2011-06-15 23:37:01 +00002204bool
2205ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
2206 &BBStates,
2207 MapVector<Value *, RRInfo> &Retains,
Dan Gohman6320f522011-07-22 22:29:21 +00002208 DenseMap<Value *, RRInfo> &Releases,
2209 Module *M) {
John McCalld935e9c2011-06-15 23:37:01 +00002210 bool AnyPairsCompletelyEliminated = false;
2211 RRInfo RetainsToMove;
2212 RRInfo ReleasesToMove;
2213 SmallVector<Instruction *, 4> NewRetains;
2214 SmallVector<Instruction *, 4> NewReleases;
2215 SmallVector<Instruction *, 8> DeadInsts;
2216
Dan Gohman670f9372012-04-13 18:57:48 +00002217 // Visit each retain.
John McCalld935e9c2011-06-15 23:37:01 +00002218 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
Dan Gohman2053a5d2011-09-29 22:25:23 +00002219 E = Retains.end(); I != E; ++I) {
2220 Value *V = I->first;
John McCalld935e9c2011-06-15 23:37:01 +00002221 if (!V) continue; // blotted
2222
2223 Instruction *Retain = cast<Instruction>(V);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002224
2225 DEBUG(dbgs() << "ObjCARCOpt::PerformCodePlacement: Visiting: " << *Retain
2226 << "\n");
2227
John McCalld935e9c2011-06-15 23:37:01 +00002228 Value *Arg = GetObjCArg(Retain);
2229
Dan Gohman728db492012-01-13 00:39:07 +00002230 // If the object being released is in static or stack storage, we know it's
John McCalld935e9c2011-06-15 23:37:01 +00002231 // not being managed by ObjC reference counting, so we can delete pairs
2232 // regardless of what possible decrements or uses lie between them.
Dan Gohman728db492012-01-13 00:39:07 +00002233 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
Dan Gohman41375a32012-05-08 23:39:44 +00002234
Dan Gohman56e1cef2011-08-22 17:29:11 +00002235 // A constant pointer can't be pointing to an object on the heap. It may
2236 // be reference-counted, but it won't be deleted.
2237 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
2238 if (const GlobalVariable *GV =
2239 dyn_cast<GlobalVariable>(
2240 StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
2241 if (GV->isConstant())
2242 KnownSafe = true;
2243
John McCalld935e9c2011-06-15 23:37:01 +00002244 // Connect the dots between the top-down-collected RetainsToMove and
2245 // bottom-up-collected ReleasesToMove to form sets of related calls.
John McCalld935e9c2011-06-15 23:37:01 +00002246 NewRetains.push_back(Retain);
Michael Gottesman9de6f962013-01-22 21:49:00 +00002247 bool PerformMoveCalls =
2248 ConnectTDBUTraversals(BBStates, Retains, Releases, M, NewRetains,
2249 NewReleases, DeadInsts, RetainsToMove,
2250 ReleasesToMove, Arg, KnownSafe,
2251 AnyPairsCompletelyEliminated);
John McCalld935e9c2011-06-15 23:37:01 +00002252
Michael Gottesman9de6f962013-01-22 21:49:00 +00002253 if (PerformMoveCalls) {
2254 // Ok, everything checks out and we're all set. Let's move/delete some
2255 // code!
2256 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
2257 Retains, Releases, DeadInsts, M);
John McCalld935e9c2011-06-15 23:37:01 +00002258 }
2259
Michael Gottesman9de6f962013-01-22 21:49:00 +00002260 // Clean up state for next retain.
John McCalld935e9c2011-06-15 23:37:01 +00002261 NewReleases.clear();
2262 NewRetains.clear();
2263 RetainsToMove.clear();
2264 ReleasesToMove.clear();
2265 }
2266
2267 // Now that we're done moving everything, we can delete the newly dead
2268 // instructions, as we no longer need them as insert points.
2269 while (!DeadInsts.empty())
2270 EraseInstruction(DeadInsts.pop_back_val());
2271
2272 return AnyPairsCompletelyEliminated;
2273}
2274
/// Weak pointer optimizations.
///
/// Performs two passes over the function:
///  1. A block-local redundant-load-elimination / store-to-load-forwarding
///     scan over objc_loadWeak / objc_loadWeakRetained calls.
///  2. Removal of weak-pointer allocas whose only users are weak entry
///     points (init/store/destroy), along with all those users.
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  // First, do memdep-style RLE and S2L optimizations. We can't use memdep
  // itself because it uses AliasAnalysis and we need to do provenance
  // queries instead.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    // Advance before any potential erasure of Inst below.
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "ObjCARCOpt::OptimizeWeakCalls: Visiting: " << *Inst <<
          "\n");

    InstructionClass Class = GetBasicInstructionClass(Inst);
    if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
      continue;

    // Delete objc_loadWeak calls with no users.
    if (Class == IC_LoadWeak && Inst->use_empty()) {
      Inst->eraseFromParent();
      continue;
    }

    // TODO: For now, just look for an earlier available version of this value
    // within the same block. Theoretically, we could do memdep-style non-local
    // analysis too, but that would want caching. A better approach would be to
    // use the technique that EarlyCSE uses.
    //
    // Scan backwards from the instruction just before Inst to the start of
    // its block, looking for an earlier call that makes this load redundant.
    inst_iterator Current = llvm::prior(I);
    BasicBlock *CurrentBB = Current.getBasicBlockIterator();
    for (BasicBlock::iterator B = CurrentBB->begin(),
         J = Current.getInstructionIterator();
         J != B; --J) {
      Instruction *EarlierInst = &*llvm::prior(J);
      InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
      switch (EarlierClass) {
      case IC_LoadWeak:
      case IC_LoadWeakRetained: {
        // If this is loading from the same pointer, replace this load's value
        // with that one.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *Arg = Call->getArgOperand(0);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        case AliasAnalysis::MustAlias:
          Changed = true;
          // If the load has a builtin retain, insert a plain retain for it.
          if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
          // Zap the fully redundant load.
          Call->replaceAllUsesWith(EarlierCall);
          Call->eraseFromParent();
          goto clobbered;
        case AliasAnalysis::MayAlias:
        case AliasAnalysis::PartialAlias:
          // Possibly-aliasing weak operation; can't forward past it.
          goto clobbered;
        case AliasAnalysis::NoAlias:
          break;
        }
        break;
      }
      case IC_StoreWeak:
      case IC_InitWeak: {
        // If this is storing to the same pointer and has the same size etc.
        // replace this load's value with the stored value.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *Arg = Call->getArgOperand(0);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        case AliasAnalysis::MustAlias:
          Changed = true;
          // If the load has a builtin retain, insert a plain retain for it.
          // (The store/init call's return value is retained here; presumably
          // these entry points return the stored value — verify against the
          // ARC runtime contract.)
          if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
          // Zap the fully redundant load; its result is the stored value.
          Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
          Call->eraseFromParent();
          goto clobbered;
        case AliasAnalysis::MayAlias:
        case AliasAnalysis::PartialAlias:
          goto clobbered;
        case AliasAnalysis::NoAlias:
          break;
        }
        break;
      }
      case IC_MoveWeak:
      case IC_CopyWeak:
        // TODO: Grab the copied value.
        goto clobbered;
      case IC_AutoreleasepoolPush:
      case IC_None:
      case IC_User:
        // Weak pointers are only modified through the weak entry points
        // (and arbitrary calls, which could call the weak entry points).
        break;
      default:
        // Anything else could modify the weak pointer.
        goto clobbered;
      }
    }
  clobbered:;
  }

  // Then, for each destroyWeak with an alloca operand, check to see if
  // the alloca and all its users can be zapped.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;
    InstructionClass Class = GetBasicInstructionClass(Inst);
    if (Class != IC_DestroyWeak)
      continue;

    CallInst *Call = cast<CallInst>(Inst);
    Value *Arg = Call->getArgOperand(0);
    if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
      // First verify that *every* user of the alloca is a weak entry point
      // whose effect becomes irrelevant once the alloca is gone.
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ++UI) {
        const Instruction *UserInst = cast<Instruction>(*UI);
        switch (GetBasicInstructionClass(UserInst)) {
        case IC_InitWeak:
        case IC_StoreWeak:
        case IC_DestroyWeak:
          continue;
        default:
          goto done;
        }
      }
      Changed = true;
      // Second pass: delete the users. Note UI is advanced before the erase.
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ) {
        CallInst *UserInst = cast<CallInst>(*UI++);
        switch (GetBasicInstructionClass(UserInst)) {
        case IC_InitWeak:
        case IC_StoreWeak:
          // These functions return their second argument.
          UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
          break;
        case IC_DestroyWeak:
          // No return value.
          break;
        default:
          llvm_unreachable("alloca really is used!");
        }
        UserInst->eraseFromParent();
      }
      Alloca->eraseFromParent();
    done:;
    }
  }

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeWeakCalls: Finished List.\n\n");

}
2435
Michael Gottesman97e3df02013-01-14 00:35:14 +00002436/// Identify program paths which execute sequences of retains and releases which
2437/// can be eliminated.
John McCalld935e9c2011-06-15 23:37:01 +00002438bool ObjCARCOpt::OptimizeSequences(Function &F) {
2439 /// Releases, Retains - These are used to store the results of the main flow
2440 /// analysis. These use Value* as the key instead of Instruction* so that the
2441 /// map stays valid when we get around to rewriting code and calls get
2442 /// replaced by arguments.
2443 DenseMap<Value *, RRInfo> Releases;
2444 MapVector<Value *, RRInfo> Retains;
2445
Michael Gottesman97e3df02013-01-14 00:35:14 +00002446 /// This is used during the traversal of the function to track the
John McCalld935e9c2011-06-15 23:37:01 +00002447 /// states for each identified object at each block.
2448 DenseMap<const BasicBlock *, BBState> BBStates;
2449
2450 // Analyze the CFG of the function, and all instructions.
2451 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
2452
2453 // Transform.
Dan Gohman6320f522011-07-22 22:29:21 +00002454 return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
2455 NestingDetected;
John McCalld935e9c2011-06-15 23:37:01 +00002456}
2457
Michael Gottesman97e3df02013-01-14 00:35:14 +00002458/// Look for this pattern:
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00002459/// \code
John McCalld935e9c2011-06-15 23:37:01 +00002460/// %call = call i8* @something(...)
2461/// %2 = call i8* @objc_retain(i8* %call)
2462/// %3 = call i8* @objc_autorelease(i8* %2)
2463/// ret i8* %3
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00002464/// \endcode
John McCalld935e9c2011-06-15 23:37:01 +00002465/// And delete the retain and autorelease.
2466///
2467/// Otherwise if it's just this:
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00002468/// \code
John McCalld935e9c2011-06-15 23:37:01 +00002469/// %3 = call i8* @objc_autorelease(i8* %2)
2470/// ret i8* %3
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00002471/// \endcode
John McCalld935e9c2011-06-15 23:37:01 +00002472/// convert the autorelease to autoreleaseRV.
2473void ObjCARCOpt::OptimizeReturns(Function &F) {
2474 if (!F.getReturnType()->isPointerTy())
2475 return;
2476
2477 SmallPtrSet<Instruction *, 4> DependingInstructions;
2478 SmallPtrSet<const BasicBlock *, 4> Visited;
2479 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
2480 BasicBlock *BB = FI;
2481 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
Michael Gottesman3f146e22013-01-01 16:05:48 +00002482
Michael Gottesman9f848ae2013-01-04 21:29:57 +00002483 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Visiting: " << *Ret << "\n");
Michael Gottesman3f146e22013-01-01 16:05:48 +00002484
John McCalld935e9c2011-06-15 23:37:01 +00002485 if (!Ret) continue;
2486
2487 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
2488 FindDependencies(NeedsPositiveRetainCount, Arg,
2489 BB, Ret, DependingInstructions, Visited, PA);
2490 if (DependingInstructions.size() != 1)
2491 goto next_block;
2492
2493 {
2494 CallInst *Autorelease =
2495 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
2496 if (!Autorelease)
2497 goto next_block;
Dan Gohman41375a32012-05-08 23:39:44 +00002498 InstructionClass AutoreleaseClass = GetBasicInstructionClass(Autorelease);
John McCalld935e9c2011-06-15 23:37:01 +00002499 if (!IsAutorelease(AutoreleaseClass))
2500 goto next_block;
2501 if (GetObjCArg(Autorelease) != Arg)
2502 goto next_block;
2503
2504 DependingInstructions.clear();
2505 Visited.clear();
2506
2507 // Check that there is nothing that can affect the reference
2508 // count between the autorelease and the retain.
2509 FindDependencies(CanChangeRetainCount, Arg,
2510 BB, Autorelease, DependingInstructions, Visited, PA);
2511 if (DependingInstructions.size() != 1)
2512 goto next_block;
2513
2514 {
2515 CallInst *Retain =
2516 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
2517
2518 // Check that we found a retain with the same argument.
2519 if (!Retain ||
2520 !IsRetain(GetBasicInstructionClass(Retain)) ||
2521 GetObjCArg(Retain) != Arg)
2522 goto next_block;
2523
2524 DependingInstructions.clear();
2525 Visited.clear();
2526
2527 // Convert the autorelease to an autoreleaseRV, since it's
2528 // returning the value.
2529 if (AutoreleaseClass == IC_Autorelease) {
Michael Gottesmana6cb0182013-01-10 02:03:50 +00002530 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Converting autorelease "
2531 "=> autoreleaseRV since it's returning a value.\n"
2532 " In: " << *Autorelease
2533 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002534 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
Michael Gottesmana6cb0182013-01-10 02:03:50 +00002535 DEBUG(dbgs() << " Out: " << *Autorelease
2536 << "\n");
Michael Gottesmanc9656fa2013-01-12 01:25:15 +00002537 Autorelease->setTailCall(); // Always tail call autoreleaseRV.
John McCalld935e9c2011-06-15 23:37:01 +00002538 AutoreleaseClass = IC_AutoreleaseRV;
2539 }
2540
2541 // Check that there is nothing that can affect the reference
2542 // count between the retain and the call.
Dan Gohman4ac148d2011-09-29 22:27:34 +00002543 // Note that Retain need not be in BB.
2544 FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
John McCalld935e9c2011-06-15 23:37:01 +00002545 DependingInstructions, Visited, PA);
2546 if (DependingInstructions.size() != 1)
2547 goto next_block;
2548
2549 {
2550 CallInst *Call =
2551 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
2552
2553 // Check that the pointer is the return value of the call.
2554 if (!Call || Arg != Call)
2555 goto next_block;
2556
2557 // Check that the call is a regular call.
2558 InstructionClass Class = GetBasicInstructionClass(Call);
2559 if (Class != IC_CallOrUser && Class != IC_Call)
2560 goto next_block;
2561
2562 // If so, we can zap the retain and autorelease.
2563 Changed = true;
2564 ++NumRets;
Michael Gottesmand61a3b22013-01-07 00:04:56 +00002565 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Erasing: " << *Retain
2566 << "\n Erasing: "
2567 << *Autorelease << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002568 EraseInstruction(Retain);
2569 EraseInstruction(Autorelease);
2570 }
2571 }
2572 }
2573
2574 next_block:
2575 DependingInstructions.clear();
2576 Visited.clear();
2577 }
Michael Gottesman10426b52013-01-07 21:26:07 +00002578
Michael Gottesman9f848ae2013-01-04 21:29:57 +00002579 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Finished List.\n\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00002580
John McCalld935e9c2011-06-15 23:37:01 +00002581}
2582
2583bool ObjCARCOpt::doInitialization(Module &M) {
2584 if (!EnableARCOpts)
2585 return false;
2586
Dan Gohman670f9372012-04-13 18:57:48 +00002587 // If nothing in the Module uses ARC, don't do anything.
Dan Gohmanceaac7c2011-06-20 23:20:43 +00002588 Run = ModuleHasARC(M);
2589 if (!Run)
2590 return false;
2591
John McCalld935e9c2011-06-15 23:37:01 +00002592 // Identify the imprecise release metadata kind.
2593 ImpreciseReleaseMDKind =
2594 M.getContext().getMDKindID("clang.imprecise_release");
Dan Gohmana7107f92011-10-17 22:53:25 +00002595 CopyOnEscapeMDKind =
2596 M.getContext().getMDKindID("clang.arc.copy_on_escape");
Dan Gohman0155f302012-02-17 18:59:53 +00002597 NoObjCARCExceptionsMDKind =
2598 M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");
John McCalld935e9c2011-06-15 23:37:01 +00002599
John McCalld935e9c2011-06-15 23:37:01 +00002600 // Intuitively, objc_retain and others are nocapture, however in practice
2601 // they are not, because they return their argument value. And objc_release
Dan Gohmandae33492012-04-27 18:56:31 +00002602 // calls finalizers which can have arbitrary side effects.
John McCalld935e9c2011-06-15 23:37:01 +00002603
2604 // These are initialized lazily.
2605 RetainRVCallee = 0;
2606 AutoreleaseRVCallee = 0;
2607 ReleaseCallee = 0;
2608 RetainCallee = 0;
Dan Gohman6320f522011-07-22 22:29:21 +00002609 RetainBlockCallee = 0;
John McCalld935e9c2011-06-15 23:37:01 +00002610 AutoreleaseCallee = 0;
2611
2612 return false;
2613}
2614
2615bool ObjCARCOpt::runOnFunction(Function &F) {
2616 if (!EnableARCOpts)
2617 return false;
2618
Dan Gohmanceaac7c2011-06-20 23:20:43 +00002619 // If nothing in the Module uses ARC, don't do anything.
2620 if (!Run)
2621 return false;
2622
John McCalld935e9c2011-06-15 23:37:01 +00002623 Changed = false;
2624
Michael Gottesmanb24bdef2013-01-12 02:57:16 +00002625 DEBUG(dbgs() << "ObjCARCOpt: Visiting Function: " << F.getName() << "\n");
2626
John McCalld935e9c2011-06-15 23:37:01 +00002627 PA.setAA(&getAnalysis<AliasAnalysis>());
2628
2629 // This pass performs several distinct transformations. As a compile-time aid
2630 // when compiling code that isn't ObjC, skip these if the relevant ObjC
2631 // library functions aren't declared.
2632
2633 // Preliminary optimizations. This also computs UsedInThisFunction.
2634 OptimizeIndividualCalls(F);
2635
2636 // Optimizations for weak pointers.
2637 if (UsedInThisFunction & ((1 << IC_LoadWeak) |
2638 (1 << IC_LoadWeakRetained) |
2639 (1 << IC_StoreWeak) |
2640 (1 << IC_InitWeak) |
2641 (1 << IC_CopyWeak) |
2642 (1 << IC_MoveWeak) |
2643 (1 << IC_DestroyWeak)))
2644 OptimizeWeakCalls(F);
2645
2646 // Optimizations for retain+release pairs.
2647 if (UsedInThisFunction & ((1 << IC_Retain) |
2648 (1 << IC_RetainRV) |
2649 (1 << IC_RetainBlock)))
2650 if (UsedInThisFunction & (1 << IC_Release))
2651 // Run OptimizeSequences until it either stops making changes or
2652 // no retain+release pair nesting is detected.
2653 while (OptimizeSequences(F)) {}
2654
2655 // Optimizations if objc_autorelease is used.
Dan Gohman41375a32012-05-08 23:39:44 +00002656 if (UsedInThisFunction & ((1 << IC_Autorelease) |
2657 (1 << IC_AutoreleaseRV)))
John McCalld935e9c2011-06-15 23:37:01 +00002658 OptimizeReturns(F);
2659
Michael Gottesmanb24bdef2013-01-12 02:57:16 +00002660 DEBUG(dbgs() << "\n");
2661
John McCalld935e9c2011-06-15 23:37:01 +00002662 return Changed;
2663}
2664
/// Free the pass's cached analysis state between runs.
void ObjCARCOpt::releaseMemory() {
  // Drop the provenance-analysis cache; it is rebuilt lazily on the next run.
  PA.clear();
}
2668
Michael Gottesman97e3df02013-01-14 00:35:14 +00002669/// @}
2670///