keep track of whether being in a RS_StructPointer state
caused us to skip laying out a function accurately. If
so, flush the type cache for both the function and struct
case to ensure that any pointers to the functions get
recomputed. This is overconservative, but with this patch
clang can build itself again.
llvm-svn: 134863
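
The fix itself lives in clang's CodeGen type lowering and is not part of this
test-only hunk, so below is a minimal standalone sketch of the pattern the
message describes: a flag recording that a function type was lowered
inaccurately while a struct was mid-layout, and a conservative cache flush once
the struct finishes. The names used here (ToyTypeConverter, TypeCache,
SkippedLayout, RecordsBeingLaidOut, convertFunction, convertRecord) are
illustrative assumptions for the sketch, not the actual clang CodeGenTypes API.

    // Minimal, self-contained sketch of the caching pattern described above.
    // Names are assumptions drawn from the commit text, not clang's real code.
    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    struct ToyTypeConverter {
      std::map<std::string, std::string> TypeCache;   // source type -> lowered type
      std::set<std::string> RecordsBeingLaidOut;      // structs whose layout is in flight
      bool SkippedLayout = false;                     // did we punt on a function type?

      // Lower a function type whose signature mentions 'Record'. If that record
      // is still being laid out (the RS_StructPointer-style recursion case), we
      // cannot lower the signature accurately yet, so cache a placeholder and
      // remember that we skipped.
      std::string convertFunction(const std::string &Name, const std::string &Record) {
        if (RecordsBeingLaidOut.count(Record)) {
          SkippedLayout = true;
          return TypeCache[Name] = "void (...)";       // inaccurate placeholder
        }
        return TypeCache[Name] = "void (" + Record + "*)";
      }

      // Lay out a struct that contains a pointer to a function taking the struct.
      std::string convertRecord(const std::string &Record) {
        RecordsBeingLaidOut.insert(Record);
        std::string FnTy = convertFunction("callback", Record); // recursion happens here
        RecordsBeingLaidOut.erase(Record);

        std::string StructTy = "{ " + FnTy + "* }";
        TypeCache[Record] = StructTy;

        // If lowering the struct forced us to skip accurate layout of a function,
        // conservatively flush the cache so later uses of that function type (and
        // of anything pointing at it) get recomputed with full information.
        if (SkippedLayout) {
          TypeCache.clear();
          SkippedLayout = false;
        }
        return StructTy;
      }
    };

    int main() {
      ToyTypeConverter CGT;
      std::cout << CGT.convertRecord("S") << "\n";            // placeholder-based layout
      std::cout << CGT.convertFunction("callback", "S") << "\n"; // recomputed accurately
    }

Flushing the whole cache is deliberately heavy-handed (the "overconservative"
part of the message): it throws away correct entries too, but it guarantees
that no stale placeholder function type survives past the struct's layout.
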
diff --git a/clang/test/CodeGenObjC/arc-foreach.m b/clang/test/CodeGenObjC/arc-foreach.m
index ccf655b..f9d7782 100644
--- a/clang/test/CodeGenObjC/arc-foreach.m
+++ b/clang/test/CodeGenObjC/arc-foreach.m
@@ -34,7 +34,7 @@
// CHECK-LP64-NEXT: [[T2:%.*]] = call i8* @objc_retain(i8* [[T1]])
// CHECK-LP64-NEXT: store i8* [[T2]], i8** [[T0]]
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]]
-// CHECK-LP64-NEXT: call void @use_block({{.*}}* [[T1]])
+// CHECK-LP64: call void @use_block(
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8** [[T0]]
// CHECK-LP64-NEXT: call void @objc_release(i8* [[T1]])
@@ -67,6 +67,6 @@
// CHECK-LP64-NEXT: [[T1:%.*]] = call i8* @objc_loadWeak(i8** [[X]])
// CHECK-LP64-NEXT: call i8* @objc_initWeak(i8** [[T0]], i8* [[T1]])
// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to
-// CHECK-LP64-NEXT: call void @use_block({{.*}} [[T1]])
+// CHECK-LP64: call void @use_block
// CHECK-LP64-NEXT: call void @objc_destroyWeak(i8** [[T0]])
// CHECK-LP64-NEXT: call void @objc_destroyWeak(i8** [[X]])