//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenModule.h"
#include "CodeGenFunction.h"

#include "clang/AST/RecordLayout.h"

using namespace clang;
using namespace CodeGen;

class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  std::vector<llvm::Constant *> &methods;
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index. Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<std::pair<CallOffset, CallOffset>,
                                   CanQualType> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
  llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
    { return VBIndex; }

  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

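  /// GenerateVBaseOffsets - Collect one vtable entry for each virtual base of
  /// RD (direct or indirect, each base seen only once), holding the byte
  /// offset from this subobject to that virtual base. When updateVBIndex is
  /// set, VBIndex additionally records where each entry will land relative to
  /// the vtable's address point.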
  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset,
                            bool updateVBIndex) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap((0?700:0) + BaseOffset);
        if (updateVBIndex)
          VBIndex[Base] = -(offsets.size()*LLVMPointerWidth/8)
            - 3*LLVMPointerWidth/8;
        offsets.push_back(m);
      }
      GenerateVBaseOffsets(offsets, Base, Offset, updateVBIndex);
    }
  }

  void StartNewTable() {
    SeenVBase.clear();
  }

  Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);

  /// getVbaseOffset - Returns the index into the vtable for the virtual base
  /// offset for the given (B) virtual base of the derived class D.
  Index_t getVbaseOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    if (D != Class)
      return VBlookup(D, B);
    llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
    i = VBIndex.find(B);
    if (i != VBIndex.end())
      return i->second;
    // FIXME: Temporary hack; is this data available by the time we need it?

    assert(false && "FIXME: Locate the containing virtual base first");
    return 0;
  }

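  /// OverrideMethod - If MD overrides a method that already occupies a slot
  /// in the vtable being built, install m in that slot, record MD's vtable
  /// index, and note any 'this'-adjusting or return-adjusting (covariant)
  /// thunks the override requires. Returns false if no such slot exists yet.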
  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;

    // FIXME: Don't like the nested loops. For very large inheritance
    // hierarchies we could have a table on the side with the final overrider
    // and just replace each instance of an overridden method once. Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax, covariance.
        if (submethods[i] != om)
          continue;
        QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance.
          Index_t nv = 0;
          if (CovariantThunks.count(OMD)) {
            oret = CovariantThunks[OMD].second;
            CovariantThunks.erase(OMD);
          }
          ReturnOffset = std::make_pair(nv, getVbaseOffset(oret, ret));
        }
        Index[MD] = i;
        submethods[i] = m;

        Thunks.erase(OMD);
        if (MorallyVirtual) {
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = Offset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          // FIXME: calculate the non-virtual offset.
          ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else
            Thunks[MD] = ThisOffset;
          return true;
        }
#if 0
        // FIXME: finish off
        int64_t O = VCallOffset[OMD] - Offset/8;
        if (O) {
          Thunks[MD] = std::make_pair(O, 0);
        }
#endif
        return true;
      }
    }

    return false;
  }

  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first.first;
      Index_t v_t = i->second.first.first.second;
      Index_t nv_r = i->second.first.second.first;
      Index_t v_r = i->second.first.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
  }

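  /// OverrideMethods - Replay the virtual methods of every class on the given
  /// inheritance path, walking the path in reverse so that the most derived
  /// override is applied last and therefore wins the vtable slot.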
  void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
                       int64_t> > *Path, bool MorallyVirtual) {
    for (std::vector<std::pair<const CXXRecordDecl *,
           int64_t> >::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t Offset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
           ++mi) {
        if (!mi->isVirtual())
          continue;

        const CXXMethodDecl *MD = *mi;
        llvm::Constant *m = 0;
        if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
          m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
        else {
          const FunctionProtoType *FPT =
            MD->getType()->getAs<FunctionProtoType>();
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                           FPT->isVariadic());

          m = wrap(CGM.GetAddrOfFunction(MD, Ty));
        }

        OverrideMethod(MD, m, MorallyVirtual, Offset);
      }
    }
  }

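  /// AddMethod - Add a single virtual method to the vtable, reusing the slot
  /// of an overridden method when one exists and appending a fresh slot
  /// otherwise. For morally virtual bases, a vcall offset slot is reserved
  /// for the method as well.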
  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
    llvm::Constant *m = 0;
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
    else {
      const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
      const llvm::Type *Ty =
        CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                       FPT->isVariadic());

      m = wrap(CGM.GetAddrOfFunction(MD, Ty));
    }

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset))
      return;

    // Otherwise, allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one; after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

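  /// NonVirtualBases - Emit the secondary vtables for RD's non-virtual bases,
  /// i.e. every non-virtual base that does not share RD's primary vtable.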
  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        std::vector<std::pair<const CXXRecordDecl *,
          int64_t> > S;
        S.push_back(std::make_pair(RD, Offset));
        GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
      }
    }
  }

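  /// end - Finish off the current vtable fragment: lay down the collected
  /// virtual base offsets and vcall offsets, the offset-to-top and RTTI
  /// entries, then the accumulated (thunk-adjusted) method pointers, and
  /// finally recurse into the non-virtual bases. Returns the address point,
  /// the index just past the RTTI entry. Sketched against the Itanium C++
  /// ABI, a fragment looks roughly like:
  ///
  ///     [vcall offsets / virtual base offsets]
  ///     [offset-to-top]
  ///     [RTTI pointer]
  ///         <-- address point
  ///     [virtual function pointers]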
  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase) {
    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      // The virtual base offsets come first...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    // then the vcall offsets...
    for (std::vector<Index_t>::reverse_iterator i = VCalls.rbegin(),
           e = VCalls.rend();
         i != e; ++i)
      methods.push_back(wrap((0?600:0) + *i));
    VCalls.clear();

    if (ForVirtualBase) {
      // ...except in a virtual base's vtable, where the virtual base offsets
      // follow the vcall offsets.
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset);
    return AddressPoint;
  }

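  /// Primaries - Add the virtual methods of RD's chain of primary bases,
  /// base-most first, and then those of RD itself to the current fragment;
  /// a dynamic class shares one address point with all of its primaries.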
  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // Vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);
  }

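  /// GenerateVtableForBase - Build the vtable fragment for RD, which lives at
  /// the given offset within the most derived class, appending its entries to
  /// 'methods'. Returns the fragment's address point, or 0 if RD has no
  /// vtable.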
  int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                bool MorallyVirtual = false, int64_t Offset = 0,
                                bool ForVirtualBase = false,
                                std::vector<std::pair<const CXXRecordDecl *,
                                int64_t> > *Path = 0) {
    if (!RD->isDynamicClass())
      return 0;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    std::vector<llvm::Constant *> offsets;
    extra = 0;
    GenerateVBaseOffsets(offsets, RD, Offset, !ForVirtualBase);
    if (ForVirtualBase)
      extra = offsets.size();

    // Vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);

    if (Path)
      OverrideMethods(Path, MorallyVirtual);

    return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
               MorallyVirtual, Offset, ForVirtualBase);
  }

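  /// GenerateVtableForVBases - Emit, once each, the vtable fragments for the
  /// virtual bases of RD that were not already emitted as indirect primary
  /// bases, recursing into any base that itself has virtual bases. As an
  /// illustration, given:
  ///
  ///     struct A { virtual void f(); };
  ///     struct B { virtual void g(); };
  ///     struct C : B, virtual A { };
  ///
  /// C shares its primary fragment with B, and this routine emits a separate
  /// fragment for the virtual base A.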
  void GenerateVtableForVBases(const CXXRecordDecl *RD,
                               int64_t Offset = 0,
                               std::vector<std::pair<const CXXRecordDecl *,
                               int64_t> > *Path = 0) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new std::vector<std::pair<const CXXRecordDecl *,
        int64_t> >;
    }
    // FIXME: We also need to override using all paths to a virtual base;
    // right now we just process the first path.
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
        StartNewTable();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
        GenerateVtableForBase(Base, true, BaseOffset, true, Path);
      }
      int64_t BaseOffset = Offset;
      if (i->isVirtual())
        BaseOffset = BLayout.getVBaseClassOffset(Base);
      if (Base->getNumVBases())
        GenerateVtableForVBases(Base, BaseOffset, Path);
    }
    Path->pop_back();
    if (alloc)
      delete Path;
  }
};


VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
}

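/// getMethodVtableIndex - Return the vtable slot index, relative to the
/// address point, that a virtual call to MD must load. As an illustration
/// (not the code below, which only computes the index), dispatch of p->f()
/// amounts to:
///
///     vtbl = *(void***)p;      // load the vtable pointer
///     fn   = vtbl[Index];      // Index = getMethodVtableIndex(f)
///     fn(p, args...);          // call through the slot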
int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
  MD = MD->getCanonicalDecl();

  MethodVtableIndicesTy::iterator I = MethodVtableIndices.find(MD);
  if (I != MethodVtableIndices.end())
    return I->second;

  const CXXRecordDecl *RD = MD->getParent();

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  MethodVtableIndices.insert(b.getIndex().begin(),
                             b.getIndex().end());

  I = MethodVtableIndices.find(MD);
  assert(I != MethodVtableIndices.end() && "Did not find index!");
  return I->second;
}

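/// getVirtualBaseOffsetIndex - Return the slot, in bytes relative to the
/// address point (hence negative), where the offset from RD to its virtual
/// base VBase is stored. Illustratively, a derived-to-virtual-base cast then
/// amounts to:
///
///     vtbl   = *(char**)p;                // load the vtable pointer
///     offset = *(long*)(vtbl + Index);    // Index is negative, in bytes
///     vbase  = (char*)p + offset;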
int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *VBase) {
  ClassPairTy ClassPair(RD, VBase);

  VirtualBaseClassIndiciesTy::iterator I =
    VirtualBaseClassIndicies.find(ClassPair);
  if (I != VirtualBaseClassIndicies.end())
    return I->second;

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
         b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
    // Insert all types.
    ClassPairTy ClassPair(RD, I->first);

    VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
  }

  I = VirtualBaseClassIndicies.find(ClassPair);
  assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");

  return I->second;
}

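/// GenerateVtable - Emit the complete vtable for RD (the fragments for the
/// non-virtual bases followed by those for the virtual bases) as one weak
/// global array of i8*, and return that global adjusted to the primary
/// fragment's address point, suitable for storing into an object's vptr.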
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCXXVtable(CGM.getMangleContext(), RD, Out);

  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty =
    llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First come the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
             llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                    AddressPoint*LLVMPointerWidth/8));
  return vtable;
}