blob: a618a21d8bb395c67a5a4f9183a5429d52143bb8 [file] [log] [blame]
; RUN: opt -objc-arc -S < %s | FileCheck %s
;
; ObjC ARC optimizer tests: deletion of "nested" retain+release pairs that
; bracket Objective-C fast-enumeration (for..in) loops. Each test retains an
; object, enumerates it via -countByEnumeratingWithState:objects:count:, and
; releases it; the inner retain/release pair should be removed by -objc-arc.

%struct.__objcFastEnumerationState = type { i64, i8**, i64*, [5 x i64] }

@"\01L_OBJC_METH_VAR_NAME_" = internal global [43 x i8] c"countByEnumeratingWithState:objects:count:\00", section "__TEXT,__objc_methname,cstring_literals", align 1
@"\01L_OBJC_SELECTOR_REFERENCES_" = internal global i8* getelementptr inbounds ([43 x i8]* @"\01L_OBJC_METH_VAR_NAME_", i64 0, i64 0), section "__DATA, __objc_selrefs, literal_pointers, no_dead_strip"
@g = common global i8* null, align 8
@"\01L_OBJC_IMAGE_INFO" = internal constant [2 x i32] [i32 0, i32 16], section "__DATA, __objc_imageinfo, regular, no_dead_strip"

declare void @callee()
declare i8* @returner()
declare i8* @objc_retainAutoreleasedReturnValue(i8*)
declare i8* @objc_retain(i8*)
declare void @objc_enumerationMutation(i8*)
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind
declare i8* @objc_msgSend(i8*, i8*, ...) nonlazybind
declare void @use(i8*)
declare void @objc_release(i8*)

!0 = metadata !{}
; Delete a nested retain+release pair.
; The object comes in as an argument; %1 is the nested retain of %0 that the
; optimizer should pair with the release of %1 in forcoll.empty and delete.

; CHECK: define void @test0(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test0(i8* %a) nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %0 = call i8* @objc_retain(i8* %a) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call6, %forcoll.refetch ]
  %tmp7 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp7, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr3 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr3, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp5 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call6 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp5, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call6, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
85
; Delete a nested retain+release pair.
; Same as test0, but the object comes from a call to @returner via
; objc_retainAutoreleasedReturnValue instead of a function argument.

; CHECK: define void @test2(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test2() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
150
; Delete a nested retain+release pair.
; Same as test0, but the object is loaded from the global @g.

; CHECK: define void @test4(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test4() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %tmp = load i8** @g, align 8
  %0 = call i8* @objc_retain(i8* %tmp) nounwind
  %tmp2 = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp2, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp4 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp4, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call, %forcoll.loopinit ], [ %call8, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr5 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr5, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp7 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call8 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp7, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call8, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
215
; Delete a nested retain+release pair.
; Like test2, but with an intervening opaque call to @callee between the
; outer retain and the inner retain.

; CHECK: define void @test5(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test5() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
281
; Delete a nested retain+release pair.
; Like test2, but with an opaque call to @callee between the inner release
; and the outer release in the exit block.

; CHECK: define void @test6(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test6() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
347
; Delete a nested retain+release pair.
; Combines test5 and test6: opaque @callee calls both before the inner
; retain and between the two releases.

; CHECK: define void @test7(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test7() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.notmutated ]
  %mutationsptr4 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  call void @use(i8* %3)
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @callee()
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
414
; Delete a nested retain+release pair.
; Like test2, but the loop body conditionally calls @callee when the current
; item is non-null, adding extra control flow inside the loop.

; CHECK: define void @test8(
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test8() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %1 = call i8* @objc_retain(i8* %0) nounwind
  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call3 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp2, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call3, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  %stateitems.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 1
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call3, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp8 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp8, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ 0, %forcoll.loopbody.outer ], [ %4, %forcoll.next ]
  %mutationsptr4 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr4, align 8
  %2 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %2, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %1)
  br label %forcoll.notmutated

forcoll.notmutated:
  %stateitems = load i8*** %stateitems.ptr, align 8
  %currentitem.ptr = getelementptr i8** %stateitems, i64 %forcoll.index
  %3 = load i8** %currentitem.ptr, align 8
  %tobool = icmp eq i8* %3, null
  br i1 %tobool, label %forcoll.next, label %if.then

if.then:
  call void @callee()
  br label %forcoll.next

forcoll.next:
  %4 = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %4, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %1, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %5 = icmp eq i64 %call7, 0
  br i1 %5, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %1) nounwind
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
486
; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test9b for the same testcase without a split backedge.
; All three retains are expected to remain (hence three positive matches).

; CHECK: define void @test9(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test9() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
555
; Like test9, but without a split backedge. This we can optimize.
; The nested retain (%2) should be deleted, leaving only the two
; retainAutoreleasedReturnValue calls.

; CHECK: define void @test9b(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
define void @test9b() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %phitmp = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %phitmp, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}
619
; TODO: Delete a nested retain+release pair.
; The optimizer currently can't do this, because of a split loop backedge.
; See test10b for the same testcase without a split backedge.
; Like test9, plus an opaque @callee call before the nested retain; all
; three retains are expected to remain.

; CHECK: define void @test10(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK: }
define void @test10() nounwind {
entry:
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  call void @callee()
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  %2 = call i8* @objc_retain(i8* %0) nounwind
  %tmp3 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated.forcoll.loopbody_crit_edge ], [ 1, %forcoll.loopbody.outer ]
  %mutationsptr5 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  %exitcond = icmp eq i64 %forcoll.index, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.notmutated.forcoll.loopbody_crit_edge

forcoll.notmutated.forcoll.loopbody_crit_edge:
  %phitmp = add i64 %forcoll.index, 1
  br label %forcoll.loopbody

forcoll.refetch:
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}

; Like test10, but without a split backedge. This we can optimize.

; CHECK: define void @test10b(
; CHECK: call i8* @objc_retain
; CHECK: call i8* @objc_retain
; CHECK-NOT: @objc_retain
; CHECK: }
; Same fast-enumeration shape as test10, except the inner-loop backedge
; branches straight back to forcoll.loopbody (no split critical-edge
; block), which is the case the pass can optimize: the CHECK lines above
; expect exactly two retains to survive.
define void @test10b() nounwind {
entry:
  ; NSFastEnumeration state struct and 16-slot object buffer.
  %state.ptr = alloca %struct.__objcFastEnumerationState, align 8
  %items.ptr = alloca [16 x i8*], align 8
  ; Two autoreleased return values, each claimed with a
  ; retainAutoreleasedReturnValue; %1 is only released at the end.
  %call = call i8* @returner()
  %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call) nounwind
  %call1 = call i8* @returner()
  %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call1) nounwind
  ; Opaque call the optimizer must treat as a potential decrement.
  call void @callee()
  ; Zero the enumeration state before the first message send.
  %tmp = bitcast %struct.__objcFastEnumerationState* %state.ptr to i8*
  call void @llvm.memset.p0i8.i64(i8* %tmp, i8 0, i64 64, i32 8, i1 false)
  ; Nested retain of %0 — the pair the optimizer is expected to remove.
  %2 = call i8* @objc_retain(i8* %0) nounwind
  ; First countByEnumeratingWithState:objects:count: message.
  %tmp3 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call4 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp3, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %iszero = icmp eq i64 %call4, 0
  br i1 %iszero, label %forcoll.empty, label %forcoll.loopinit

forcoll.loopinit:
  ; Capture the collection's initial mutations counter.
  %mutationsptr.ptr = getelementptr inbounds %struct.__objcFastEnumerationState* %state.ptr, i64 0, i32 2
  %mutationsptr = load i64** %mutationsptr.ptr, align 8
  %forcoll.initial-mutations = load i64* %mutationsptr, align 8
  br label %forcoll.loopbody.outer

forcoll.loopbody.outer:
  ; Outer loop: one iteration per fetched batch; umax = max(count, 1).
  %forcoll.count.ph = phi i64 [ %call4, %forcoll.loopinit ], [ %call7, %forcoll.refetch ]
  %tmp9 = icmp ugt i64 %forcoll.count.ph, 1
  %umax = select i1 %tmp9, i64 %forcoll.count.ph, i64 1
  br label %forcoll.loopbody

forcoll.loopbody:
  ; Inner loop over the current batch; the backedge comes directly from
  ; forcoll.notmutated (no split critical-edge block, unlike test10).
  %forcoll.index = phi i64 [ %phitmp, %forcoll.notmutated ], [ 0, %forcoll.loopbody.outer ]
  ; Re-check the mutations counter on every iteration.
  %mutationsptr5 = load i64** %mutationsptr.ptr, align 8
  %statemutations = load i64* %mutationsptr5, align 8
  %3 = icmp eq i64 %statemutations, %forcoll.initial-mutations
  br i1 %3, label %forcoll.notmutated, label %forcoll.mutated

forcoll.mutated:
  ; Collection mutated during enumeration: raise the runtime error.
  call void @objc_enumerationMutation(i8* %2)
  br label %forcoll.notmutated

forcoll.notmutated:
  ; Increment and loop straight back — the unsplit backedge.
  %phitmp = add i64 %forcoll.index, 1
  %exitcond = icmp eq i64 %phitmp, %umax
  br i1 %exitcond, label %forcoll.refetch, label %forcoll.loopbody

forcoll.refetch:
  ; Batch exhausted; ask the collection for the next batch.
  %tmp6 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
  %call7 = call i64 bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to i64 (i8*, i8*, %struct.__objcFastEnumerationState*, [16 x i8*]*, i64)*)(i8* %2, i8* %tmp6, %struct.__objcFastEnumerationState* %state.ptr, [16 x i8*]* %items.ptr, i64 16)
  %4 = icmp eq i64 %call7, 0
  br i1 %4, label %forcoll.empty, label %forcoll.loopbody.outer

forcoll.empty:
  ; Balance the nested retain, then the two entry retains.
  call void @objc_release(i8* %2) nounwind
  call void @objc_release(i8* %1) nounwind, !clang.imprecise_release !0
  call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
  ret void
}