// RUN: %clang_cc1 -fno-rtti -emit-llvm %s -o - -cxx-abi microsoft -triple=i386-pc-win32 | FileCheck %s

struct B1 {
  void foo();
  int b;
};
struct B2 {
  void foo();
};
struct Single : B1 {
  void foo();
};
struct Multiple : B1, B2 {
  void foo();
};
struct Virtual : virtual B1 {
  int v;
  void foo();
};

struct POD {
  int a;
  int b;
};

struct Polymorphic {
  virtual void myVirtual();
  int a;
  int b;
};

// This class uses the virtual inheritance model, yet its vbptr offset is not 0.
// We still use zero for the null field offset, despite it being a valid field
// offset.
struct NonZeroVBPtr : POD, Virtual {
  int n;
};

struct Unspecified;

// Check that we can lower the LLVM types and get the null initializers right.
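// The globals below exercise one declaration per inheritance model: a lone
// i32 field offset for the single and polymorphic cases, { field offset,
// vbtable offset } once virtual bases are involved, and { field offset,
// vbptr offset, vbtable offset } for the unspecified model. Per the CHECK
// lines, null sets the last field to -1, except for Polymorphic, where the
// vfptr occupies offset 0 and a field offset of 0 already serves as null.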
int Single     ::*s_d_memptr;
int Polymorphic::*p_d_memptr;
int Multiple   ::*m_d_memptr;
int Virtual    ::*v_d_memptr;
int NonZeroVBPtr::*n_d_memptr;
int Unspecified::*u_d_memptr;
// CHECK: @"\01?s_d_memptr@@3PQSingle@@HA" = global i32 -1, align 4
// CHECK: @"\01?p_d_memptr@@3PQPolymorphic@@HA" = global i32 0, align 4
// CHECK: @"\01?m_d_memptr@@3PQMultiple@@HA" = global i32 -1, align 4
// CHECK: @"\01?v_d_memptr@@3PQVirtual@@HA" = global { i32, i32 }
// CHECK: { i32 0, i32 -1 }, align 4
// CHECK: @"\01?n_d_memptr@@3PQNonZeroVBPtr@@HA" = global { i32, i32 }
// CHECK: { i32 0, i32 -1 }, align 4
// CHECK: @"\01?u_d_memptr@@3PQUnspecified@@HA" = global { i32, i32, i32 }
// CHECK: { i32 0, i32 0, i32 -1 }, align 4

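// Function member pointers gain adjustment fields the same way: a bare
// function pointer for Single, plus a non-virtual this adjustment for
// Multiple, plus a vbtable offset for Virtual. Null is all zero bits in
// every model, since a null function pointer is unambiguous on its own.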
void (Single  ::*s_f_memptr)();
void (Multiple::*m_f_memptr)();
void (Virtual ::*v_f_memptr)();
// CHECK: @"\01?s_f_memptr@@3P8Single@@AEXXZA" = global i8* null, align 4
// CHECK: @"\01?m_f_memptr@@3P8Multiple@@AEXXZA" = global { i8*, i32 } zeroinitializer, align 4
// CHECK: @"\01?v_f_memptr@@3P8Virtual@@AEXXZA" = global { i8*, i32, i32 } zeroinitializer, align 4

// We can define Unspecified after locking in the inheritance model.
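// Declaring u_d_memptr above, while Unspecified was still incomplete, locked
// in the most general representation; completing the class here must not
// change the member pointer layout that was already chosen.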
struct Unspecified : Virtual {
  void foo();
  int u;
};

struct UnspecWithVBPtr;
int UnspecWithVBPtr::*forceUnspecWithVBPtr;
struct UnspecWithVBPtr : B1, virtual B2 {
  int u;
  void foo();
};

// Test emitting non-virtual member pointers in a non-constexpr setting.
void EmitNonVirtualMemberPointers() {
  void (Single     ::*s_f_memptr)() = &Single::foo;
  void (Multiple   ::*m_f_memptr)() = &Multiple::foo;
  void (Virtual    ::*v_f_memptr)() = &Virtual::foo;
  void (Unspecified::*u_f_memptr)() = &Unspecified::foo;
  void (UnspecWithVBPtr::*u2_f_memptr)() = &UnspecWithVBPtr::foo;
// CHECK: define void @"\01?EmitNonVirtualMemberPointers@@YAXXZ"() #0 {
// CHECK: alloca i8*, align 4
// CHECK: alloca { i8*, i32 }, align 4
// CHECK: alloca { i8*, i32, i32 }, align 4
// CHECK: alloca { i8*, i32, i32, i32 }, align 4
// CHECK: store i8* bitcast (void (%{{.*}}*)* @"\01?foo@Single@@QAEXXZ" to i8*), i8** %{{.*}}, align 4
// CHECK: store { i8*, i32 }
// CHECK: { i8* bitcast (void (%{{.*}}*)* @"\01?foo@Multiple@@QAEXXZ" to i8*), i32 0 },
// CHECK: { i8*, i32 }* %{{.*}}, align 4
// CHECK: store { i8*, i32, i32 }
// CHECK: { i8* bitcast (void (%{{.*}}*)* @"\01?foo@Virtual@@QAEXXZ" to i8*), i32 0, i32 0 },
// CHECK: { i8*, i32, i32 }* %{{.*}}, align 4
// CHECK: store { i8*, i32, i32, i32 }
// CHECK: { i8* bitcast (void (%{{.*}}*)* @"\01?foo@Unspecified@@QAEXXZ" to i8*), i32 0, i32 0, i32 0 },
// CHECK: { i8*, i32, i32, i32 }* %{{.*}}, align 4
// CHECK: store { i8*, i32, i32, i32 }
// CHECK: { i8* bitcast (void (%{{.*}}*)* @"\01?foo@UnspecWithVBPtr@@QAEXXZ" to i8*),
// CHECK: i32 0, i32 4, i32 0 },
// CHECK: { i8*, i32, i32, i32 }* %{{.*}}, align 4
// CHECK: ret void
// CHECK: }
}

void podMemPtrs() {
  int POD::*memptr;
  memptr = &POD::a;
  memptr = &POD::b;
  if (memptr)
    memptr = 0;
// Check that member pointers use the right offsets and that null is -1.
// CHECK: define void @"\01?podMemPtrs@@YAXXZ"() #0 {
// CHECK: %[[memptr:.*]] = alloca i32, align 4
// CHECK-NEXT: store i32 0, i32* %[[memptr]], align 4
// CHECK-NEXT: store i32 4, i32* %[[memptr]], align 4
// CHECK-NEXT: %[[memptr_val:.*]] = load i32* %[[memptr]], align 4
// CHECK-NEXT: %{{.*}} = icmp ne i32 %[[memptr_val]], -1
// CHECK-NEXT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
// CHECK: store i32 -1, i32* %[[memptr]], align 4
// CHECK: ret void
// CHECK: }
}

void polymorphicMemPtrs() {
  int Polymorphic::*memptr;
  memptr = &Polymorphic::a;
  memptr = &Polymorphic::b;
  if (memptr)
    memptr = 0;
// Member pointers for polymorphic classes include the vtable slot in their
// offset and use 0 to represent null.
// CHECK: define void @"\01?polymorphicMemPtrs@@YAXXZ"() #0 {
// CHECK: %[[memptr:.*]] = alloca i32, align 4
// CHECK-NEXT: store i32 4, i32* %[[memptr]], align 4
// CHECK-NEXT: store i32 8, i32* %[[memptr]], align 4
// CHECK-NEXT: %[[memptr_val:.*]] = load i32* %[[memptr]], align 4
// CHECK-NEXT: %{{.*}} = icmp ne i32 %[[memptr_val]], 0
// CHECK-NEXT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
// CHECK: store i32 0, i32* %[[memptr]], align 4
// CHECK: ret void
// CHECK: }
}

bool nullTestDataUnspecified(int Unspecified::*mp) {
  return mp;
// CHECK: define zeroext i1 @"\01?nullTestDataUnspecified@@YA_NPQUnspecified@@H@Z"{{.*}} {
// CHECK: %{{.*}} = load { i32, i32, i32 }* %{{.*}}, align 4
// CHECK: store { i32, i32, i32 } {{.*}} align 4
// CHECK: %[[mp:.*]] = load { i32, i32, i32 }* %{{.*}}, align 4
// CHECK: %[[mp0:.*]] = extractvalue { i32, i32, i32 } %[[mp]], 0
// CHECK: %[[cmp0:.*]] = icmp ne i32 %[[mp0]], 0
// CHECK: %[[mp1:.*]] = extractvalue { i32, i32, i32 } %[[mp]], 1
// CHECK: %[[cmp1:.*]] = icmp ne i32 %[[mp1]], 0
// CHECK: %[[and0:.*]] = and i1 %[[cmp0]], %[[cmp1]]
// CHECK: %[[mp2:.*]] = extractvalue { i32, i32, i32 } %[[mp]], 2
// CHECK: %[[cmp2:.*]] = icmp ne i32 %[[mp2]], -1
// CHECK: %[[and1:.*]] = and i1 %[[and0]], %[[cmp2]]
// CHECK: ret i1 %[[and1]]
// CHECK: }
}

bool nullTestFunctionUnspecified(void (Unspecified::*mp)()) {
  return mp;
// CHECK: define zeroext i1 @"\01?nullTestFunctionUnspecified@@YA_NP8Unspecified@@AEXXZ@Z"{{.*}} {
// CHECK: %{{.*}} = load { i8*, i32, i32, i32 }* %{{.*}}, align 4
// CHECK: store { i8*, i32, i32, i32 } {{.*}} align 4
// CHECK: %[[mp:.*]] = load { i8*, i32, i32, i32 }* %{{.*}}, align 4
// CHECK: %[[mp0:.*]] = extractvalue { i8*, i32, i32, i32 } %[[mp]], 0
// CHECK: %[[cmp0:.*]] = icmp ne i8* %[[mp0]], null
// CHECK: ret i1 %[[cmp0]]
// CHECK: }
}

int loadDataMemberPointerVirtual(Virtual *o, int Virtual::*memptr) {
  return o->*memptr;
// Test that we can unpack this aggregate member pointer and load the member
// data pointer.
// CHECK: define i32 @"\01?loadDataMemberPointerVirtual@@YAHPAUVirtual@@PQ1@H@Z"{{.*}} {
// CHECK: %[[o:.*]] = load %{{.*}}** %{{.*}}, align 4
// CHECK: %[[memptr:.*]] = load { i32, i32 }* %{{.*}}, align 4
// CHECK: %[[memptr0:.*]] = extractvalue { i32, i32 } %[[memptr:.*]], 0
// CHECK: %[[memptr1:.*]] = extractvalue { i32, i32 } %[[memptr:.*]], 1
// CHECK: %[[v6:.*]] = bitcast %{{.*}}* %[[o]] to i8*
// CHECK: %[[vbptr:.*]] = getelementptr inbounds i8* %[[v6]], i32 0
// CHECK: %[[vbptr_a:.*]] = bitcast i8* %[[vbptr]] to i8**
// CHECK: %[[vbtable:.*]] = load i8** %[[vbptr_a:.*]]
// CHECK: %[[v7:.*]] = getelementptr inbounds i8* %[[vbtable]], i32 %[[memptr1]]
// CHECK: %[[v8:.*]] = bitcast i8* %[[v7]] to i32*
// CHECK: %[[vbase_offs:.*]] = load i32* %[[v8]]
// CHECK: %[[v10:.*]] = getelementptr inbounds i8* %[[vbptr]], i32 %[[vbase_offs]]
// CHECK: %[[offset:.*]] = getelementptr inbounds i8* %[[v10]], i32 %[[memptr0]]
// CHECK: %[[v11:.*]] = bitcast i8* %[[offset]] to i32*
// CHECK: %[[v12:.*]] = load i32* %[[v11]]
// CHECK: ret i32 %[[v12]]
// CHECK: }
}

int loadDataMemberPointerUnspecified(Unspecified *o, int Unspecified::*memptr) {
  return o->*memptr;
// Test that we can unpack this aggregate member pointer and load the member
// data pointer.
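// Here the vbtable offset may be zero, meaning no virtual base adjustment is
// needed, so the lookup is guarded by a branch and the two paths merge with a
// phi before the field offset is applied.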
// CHECK: define i32 @"\01?loadDataMemberPointerUnspecified@@YAHPAUUnspecified@@PQ1@H@Z"{{.*}} {
// CHECK: %[[o:.*]] = load %{{.*}}** %{{.*}}, align 4
// CHECK: %[[memptr:.*]] = load { i32, i32, i32 }* %{{.*}}, align 4
// CHECK: %[[memptr0:.*]] = extractvalue { i32, i32, i32 } %[[memptr:.*]], 0
// CHECK: %[[memptr1:.*]] = extractvalue { i32, i32, i32 } %[[memptr:.*]], 1
// CHECK: %[[memptr2:.*]] = extractvalue { i32, i32, i32 } %[[memptr:.*]], 2
// CHECK: %[[base:.*]] = bitcast %{{.*}}* %[[o]] to i8*
// CHECK: %[[is_vbase:.*]] = icmp ne i32 %[[memptr2]], 0
// CHECK: br i1 %[[is_vbase]], label %[[vadjust:.*]], label %[[skip:.*]]
//
// CHECK: [[vadjust]]
// CHECK: %[[vbptr:.*]] = getelementptr inbounds i8* %[[base]], i32 %[[memptr1]]
// CHECK: %[[vbptr_a:.*]] = bitcast i8* %[[vbptr]] to i8**
// CHECK: %[[vbtable:.*]] = load i8** %[[vbptr_a:.*]]
// CHECK: %[[v7:.*]] = getelementptr inbounds i8* %[[vbtable]], i32 %[[memptr2]]
// CHECK: %[[v8:.*]] = bitcast i8* %[[v7]] to i32*
// CHECK: %[[vbase_offs:.*]] = load i32* %[[v8]]
// CHECK: %[[base_adj:.*]] = getelementptr inbounds i8* %[[vbptr]], i32 %[[vbase_offs]]
//
// CHECK: [[skip]]
// CHECK: %[[new_base:.*]] = phi i8* [ %[[base]], %{{.*}} ], [ %[[base_adj]], %[[vadjust]] ]
// CHECK: %[[offset:.*]] = getelementptr inbounds i8* %[[new_base]], i32 %[[memptr0]]
// CHECK: %[[v11:.*]] = bitcast i8* %[[offset]] to i32*
// CHECK: %[[v12:.*]] = load i32* %[[v11]]
// CHECK: ret i32 %[[v12]]
// CHECK: }
}

void callMemberPointerSingle(Single *o, void (Single::*memptr)()) {
  (o->*memptr)();
// Just look for an indirect thiscall.
// CHECK: define void @"\01?callMemberPointerSingle@@{{.*}} #0 {
// CHECK: call x86_thiscallcc void %{{.*}}(%{{.*}} %{{.*}})
// CHECK: ret void
// CHECK: }
}

void callMemberPointerMultiple(Multiple *o, void (Multiple::*memptr)()) {
  (o->*memptr)();
// CHECK: define void @"\01?callMemberPointerMultiple@@{{.*}} #0 {
// CHECK: %[[memptr0:.*]] = extractvalue { i8*, i32 } %{{.*}}, 0
// CHECK: %[[memptr1:.*]] = extractvalue { i8*, i32 } %{{.*}}, 1
// CHECK: %[[this_adjusted:.*]] = getelementptr inbounds i8* %{{.*}}, i32 %[[memptr1]]
// CHECK: %[[this:.*]] = bitcast i8* %[[this_adjusted]] to {{.*}}
// CHECK: %[[fptr:.*]] = bitcast i8* %[[memptr0]] to {{.*}}
// CHECK: call x86_thiscallcc void %[[fptr]](%{{.*}} %[[this]])
// CHECK: ret void
// CHECK: }
}

void callMemberPointerVirtualBase(Virtual *o, void (Virtual::*memptr)()) {
  (o->*memptr)();
// This shares a lot with virtual data member pointers.
// CHECK: define void @"\01?callMemberPointerVirtualBase@@{{.*}} #0 {
// CHECK: %[[memptr0:.*]] = extractvalue { i8*, i32, i32 } %{{.*}}, 0
// CHECK: %[[memptr1:.*]] = extractvalue { i8*, i32, i32 } %{{.*}}, 1
// CHECK: %[[memptr2:.*]] = extractvalue { i8*, i32, i32 } %{{.*}}, 2
// CHECK: %[[vbptr:.*]] = getelementptr inbounds i8* %{{.*}}, i32 0
// CHECK: %[[vbptr_a:.*]] = bitcast i8* %[[vbptr]] to i8**
// CHECK: %[[vbtable:.*]] = load i8** %[[vbptr_a:.*]]
// CHECK: %[[v7:.*]] = getelementptr inbounds i8* %[[vbtable]], i32 %[[memptr2]]
// CHECK: %[[v8:.*]] = bitcast i8* %[[v7]] to i32*
// CHECK: %[[vbase_offs:.*]] = load i32* %[[v8]]
// CHECK: %[[v10:.*]] = getelementptr inbounds i8* %[[vbptr]], i32 %[[vbase_offs]]
// CHECK: %[[this_adjusted:.*]] = getelementptr inbounds i8* %[[v10]], i32 %[[memptr1]]
// CHECK: %[[fptr:.*]] = bitcast i8* %[[memptr0]] to void ({{.*}})
// CHECK: %[[this:.*]] = bitcast i8* %[[this_adjusted]] to {{.*}}
// CHECK: call x86_thiscallcc void %[[fptr]](%{{.*}} %[[this]])
// CHECK: ret void
// CHECK: }
}

bool compareSingleFunctionMemptr(void (Single::*l)(), void (Single::*r)()) {
  return l == r;
// Should only be one comparison here.
// CHECK: define zeroext i1 @"\01?compareSingleFunctionMemptr@@YA_NP8Single@@AEXXZ0@Z"{{.*}} {
// CHECK-NOT: icmp
// CHECK: %[[r:.*]] = icmp eq
// CHECK-NOT: icmp
// CHECK: ret i1 %[[r]]
// CHECK: }
}

bool compareNeqSingleFunctionMemptr(void (Single::*l)(), void (Single::*r)()) {
  return l != r;
// Should only be one comparison here.
// CHECK: define zeroext i1 @"\01?compareNeqSingleFunctionMemptr@@YA_NP8Single@@AEXXZ0@Z"{{.*}} {
// CHECK-NOT: icmp
// CHECK: %[[r:.*]] = icmp ne
// CHECK-NOT: icmp
// CHECK: ret i1 %[[r]]
// CHECK: }
}

bool unspecFuncMemptrEq(void (Unspecified::*l)(), void (Unspecified::*r)()) {
  return l == r;
// CHECK: define zeroext i1 @"\01?unspecFuncMemptrEq@@YA_NP8Unspecified@@AEXXZ0@Z"{{.*}} {
// CHECK: %[[lhs0:.*]] = extractvalue { i8*, i32, i32, i32 } %[[l:.*]], 0
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r:.*]], 0
// CHECK: %[[cmp0:.*]] = icmp eq i8* %[[lhs0]], %{{.*}}
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 1
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 1
// CHECK: %[[cmp1:.*]] = icmp eq i32
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 2
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 2
// CHECK: %[[cmp2:.*]] = icmp eq i32
// CHECK: %[[res12:.*]] = and i1 %[[cmp1]], %[[cmp2]]
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 3
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 3
// CHECK: %[[cmp3:.*]] = icmp eq i32
// CHECK: %[[res123:.*]] = and i1 %[[res12]], %[[cmp3]]
// CHECK: %[[iszero:.*]] = icmp eq i8* %[[lhs0]], null
// CHECK: %[[bits_or_null:.*]] = or i1 %[[res123]], %[[iszero]]
// CHECK: %{{.*}} = and i1 %[[bits_or_null]], %[[cmp0]]
// CHECK: ret i1 %{{.*}}
// CHECK: }
}

bool unspecFuncMemptrNeq(void (Unspecified::*l)(), void (Unspecified::*r)()) {
  return l != r;
// CHECK: define zeroext i1 @"\01?unspecFuncMemptrNeq@@YA_NP8Unspecified@@AEXXZ0@Z"{{.*}} {
// CHECK: %[[lhs0:.*]] = extractvalue { i8*, i32, i32, i32 } %[[l:.*]], 0
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r:.*]], 0
// CHECK: %[[cmp0:.*]] = icmp ne i8* %[[lhs0]], %{{.*}}
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 1
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 1
// CHECK: %[[cmp1:.*]] = icmp ne i32
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 2
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 2
// CHECK: %[[cmp2:.*]] = icmp ne i32
// CHECK: %[[res12:.*]] = or i1 %[[cmp1]], %[[cmp2]]
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[l]], 3
// CHECK: %{{.*}} = extractvalue { i8*, i32, i32, i32 } %[[r]], 3
// CHECK: %[[cmp3:.*]] = icmp ne i32
// CHECK: %[[res123:.*]] = or i1 %[[res12]], %[[cmp3]]
// CHECK: %[[iszero:.*]] = icmp ne i8* %[[lhs0]], null
// CHECK: %[[bits_or_null:.*]] = and i1 %[[res123]], %[[iszero]]
// CHECK: %{{.*}} = or i1 %[[bits_or_null]], %[[cmp0]]
// CHECK: ret i1 %{{.*}}
// CHECK: }
}

bool unspecDataMemptrEq(int Unspecified::*l, int Unspecified::*r) {
  return l == r;
// CHECK: define zeroext i1 @"\01?unspecDataMemptrEq@@YA_NPQUnspecified@@H0@Z"{{.*}} {
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 0
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 0
// CHECK: icmp eq i32
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 1
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 1
// CHECK: icmp eq i32
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 2
// CHECK: extractvalue { i32, i32, i32 } %{{.*}}, 2
// CHECK: icmp eq i32
// CHECK: and i1
// CHECK: and i1
// CHECK: ret i1
// CHECK: }
}