; RUN: opt -objc-arc -S < %s | FileCheck %s

%struct.__objcFastEnumerationState = type { i64, i8**, i64*, [5 x i64] }

@"\01L_OBJC_METH_VAR_NAME_" = internal global [43 x i8] c"countByEnumeratingWithState:objects:count:\00", section "__TEXT,__objc_methname,cstring_literals", align 1
@"\01L_OBJC_SELECTOR_REFERENCES_" = internal global i8* getelementptr inbounds ([43 x i8], [43 x i8]* @"\01L_OBJC_METH_VAR_NAME_", i64 0, i64 0), section "__DATA, __objc_selrefs, literal_pointers, no_dead_strip"
@g = common global i8* null, align 8
@"\01L_OBJC_IMAGE_INFO" = internal constant [2 x i32] [i32 0, i32 16], section "__DATA, __objc_imageinfo, regular, no_dead_strip"

declare void @callee()
declare i8* @returner()
declare i8* @objc_retainAutoreleasedReturnValue(i8*)
declare i8* @objc_retain(i8*)
declare void @objc_enumerationMutation(i8*)
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind
declare i8* @objc_msgSend(i8*, i8*, ...) nonlazybind
declare void @use(i8*)
declare void @objc_release(i8*)
declare i8* @def()
declare void @__crasher_block_invoke(i8* nocapture)
declare i8* @objc_retainBlock(i8*)
declare void @__crasher_block_invoke1(i8* nocapture)

!0 = !{}

; Delete a nested retain+release pair.
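;
; A minimal sketch of the "nested" pattern these tests exercise, assuming the
; usual clang codegen for an Objective-C for-in loop (the names here are
; illustrative, not taken from the tests below):
;
;   %outer = call i8* @objc_retain(i8* %obj)     ; retain of the enumerated object
;   %inner = call i8* @objc_retain(i8* %outer)   ; second retain guarding the loop
;   ; ... fast-enumeration loop using %inner ...
;   call void @objc_release(i8* %inner)          ; inner release at loop exit
;   call void @objc_release(i8* %outer)          ; outer release
;
; The inner retain/release pair adds no protection beyond the outer one, so the
; optimizer is expected to delete it and keep only a single retain.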

; CHECK-LABEL: define void @test0(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test0(i8* %a) nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %0 = call i8* @objc_retain(i8* %a) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call6, %forcoll.refetch ]
  %tmp7 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp7, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr3 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr3, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp5 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call6 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp5, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call6, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.

; CHECK-LABEL: define void @test2(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test2() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.

; CHECK-LABEL: define void @test4(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test4() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %tmp = load i8*, i8** @g, align 8
  %0 = call i8* @objc_retain(i8* %tmp) nounwind
  %tmp2 = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp2, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp4 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp4, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call8, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp7 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call8 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp7, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call8, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.

; CHECK-LABEL: define void @test5(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test5() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; We handle this now because a release only needs a post-dominating use.
;
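; Compared with test5, the only difference is the call to @callee between the
; two releases in forcoll.empty:
;
;   call void @objc_release(i8* %1) nounwind
;   call void @callee()
;   call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
;
; That intervening call no longer blocks the optimization, so the inner
; retain/release pair is still deleted and only one retain remains.
;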
; CHECK-LABEL: define void @test6(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test6() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because it isn't sophisticated enough in
; reasoning about nesting.
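;
; Relative to test6, the only change (a reading of the body below, not a claim
; about the pass's internals) is the call to @callee in the entry block, between
; the retain of the returned value and the retain that guards the enumeration:
;
;   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
;   call void @callee()
;   ...
;   %1 = call i8* @objc_retain(i8* %0) nounwind
;
; For now both retains are expected to survive the pass.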

; CHECK-LABEL: define void @test7(
; CHECK: call i8* @objc_retain
; CHECK: @objc_retain
; CHECK: }
define void @test7() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Delete a nested retain+release pair.

; CHECK-LABEL: define void @test8(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test8() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.next ]
  %mutationsptr4 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8**, i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8*, i8** %stateitems, i64 %forcoll.index
  %3 = load i8*, i8** %currentitem.ptr, align 8
  %tobool = icmp eq i8* %3, null
  br i1 %tobool, label %forcoll.next, label %if.then

if.then:
  call void @callee()
  br label %forcoll.next

forcoll.next:
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test9b for the same testcase without a split backedge.
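;
; "Split backedge" refers to the loop structure in the body below: the backedge
; of forcoll.loopbody does not come directly from forcoll.notmutated; it goes
; through a separate critical-edge block that performs the increment:
;
;   forcoll.notmutated:
;     %exitcond = icmp eq i64 %forcoll.index, %umax
;     br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge
;
;   forcoll.notmutated.forcoll.loopbody_crit_edge:
;     %phitmp = add i64 %forcoll.index, 1
;     br label %forcoll.loopbody
;
; In test9b the increment is folded back into forcoll.notmutated and the
; backedge is direct.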

; CHECK-LABEL: define void @test9(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test9() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Like test9, but without a split backedge. TODO: optimize this.

; CHECK-LABEL: define void @test9b(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: @objc_retain
; CHECK: }
define void @test9b() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %phitmp = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %phitmp, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test10b for the same testcase without a split backedge.

; CHECK-LABEL: define void @test10(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test10() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Like test10, but without a split backedge. TODO: optimize this.

; CHECK-LABEL: define void @test10b(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: @objc_retain
; CHECK: }
define void @test10b() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState, %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64*, i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64, i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64*, i64** %mutationsptr.ptr, align 8
  %statemutations = load i64, i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %phitmp = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %phitmp, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8*, i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Pointers to strong pointers can obscure provenance relationships. Be conservative
; in the face of escaping pointers. rdar://12150909.
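;
; A sketch of the escape that matters here, drawn from the body below: the
; retained pointer is stored into a slot inside a stack block, so it becomes
; reachable through memory rather than only through SSA values:
;
;   %foo2 = tail call i8* @objc_retain(i8* %call) nounwind
;   store i8* %foo2, i8** %foo, align 8
;   ...
;   %strongdestroy = load i8*, i8** %foo, align 8
;   call void @objc_release(i8* %strongdestroy) nounwind, !clang.imprecise_release !0
;
; Once the pointer has escaped this way, the pass must not assume the loaded
; value has the same provenance as the pointer it retained, so both retains of
; %call are expected to remain.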

%struct.__block_d = type { i64, i64 }

@_NSConcreteStackBlock = external global i8*
@__block_d_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
@__block_d_tmp5 = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }

; CHECK-LABEL: define void @test11(
; CHECK: tail call i8* @objc_retain(i8* %call) [[NUW:#[0-9]+]]
; CHECK: tail call i8* @objc_retain(i8* %call) [[NUW]]
; CHECK: call void @objc_release(i8* %call) [[NUW]], !clang.imprecise_release !0
; CHECK: }
define void @test11() {
entry:
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
  %block9 = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
  %call = call i8* @def(), !clang.arc.no_objc_arc_exceptions !0
  %foo = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 5
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__crasher_block_invoke to i8*), i8** %block.invoke, align 8
  %block.d = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp to %struct.__block_d*), %struct.__block_d** %block.d, align 8
  %foo2 = tail call i8* @objc_retain(i8* %call) nounwind
  store i8* %foo2, i8** %foo, align 8
  %foo4 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block to i8*
  %foo5 = call i8* @objc_retainBlock(i8* %foo4) nounwind
  call void @use(i8* %foo5), !clang.arc.no_objc_arc_exceptions !0
  call void @objc_release(i8* %foo5) nounwind
  %strongdestroy = load i8*, i8** %foo, align 8
  call void @objc_release(i8* %strongdestroy) nounwind, !clang.imprecise_release !0
  %foo10 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 5
  %block.isa11 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa11, align 8
  %block.flags12 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 1
  store i32 1107296256, i32* %block.flags12, align 8
  %block.reserved13 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 2
  store i32 0, i32* %block.reserved13, align 4
  %block.invoke14 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__crasher_block_invoke1 to i8*), i8** %block.invoke14, align 8
  %block.d15 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 4
  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp5 to %struct.__block_d*), %struct.__block_d** %block.d15, align 8
  %foo18 = call i8* @objc_retain(i8* %call) nounwind
  store i8* %call, i8** %foo10, align 8
  %foo20 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9 to i8*
  %foo21 = call i8* @objc_retainBlock(i8* %foo20) nounwind
  call void @use(i8* %foo21), !clang.arc.no_objc_arc_exceptions !0
  call void @objc_release(i8* %foo21) nounwind
  %strongdestroy25 = load i8*, i8** %foo10, align 8
  call void @objc_release(i8* %strongdestroy25) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %call) nounwind, !clang.imprecise_release !0
  ret void
}


; CHECK: attributes #0 = { nounwind argmemonly }
; CHECK: attributes #1 = { nonlazybind }
; CHECK: attributes [[NUW]] = { nounwind }