//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//
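//
// For orientation, a sketch (illustrative, not normative) of the vtable
// group this builder emits for each sub-vtable, mirroring what end() below
// pushes into 'methods':
//
//   [vcall offsets]           deferred slots; virtual-base tables only
//   [virtual base offsets]    one entry per virtual base, in reverse order
//   [offset to top]           wrap(-(Offset/8))
//   [RTTI pointer]            rtti
//   <address point>           what an object's vptr actually points at
//   [virtual function ptrs]   submethods, including any thunks
//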

#include "CodeGenModule.h"
#include "CodeGenFunction.h"

#include "clang/AST/RecordLayout.h"

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallSet.h"

#include <vector>

using namespace clang;
using namespace CodeGen;

class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  std::vector<llvm::Constant *> &methods;
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index. Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<std::pair<CallOffset, CallOffset>,
                                   CanQualType> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable.
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
  llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
    { return VBIndex; }

  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset,
                            bool updateVBIndex, Index_t current_vbindex) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      Index_t next_vbindex = current_vbindex;
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap((0?700:0) + BaseOffset);
        if (updateVBIndex) {
          next_vbindex = (ssize_t)(-(offsets.size()*LLVMPointerWidth/8)
                                   - 3*LLVMPointerWidth/8);
          VBIndex[Base] = next_vbindex;
        }
        offsets.push_back(m);
      }
      // We also record offsets from non-virtual bases to their closest
      // enclosing virtual base, so that we don't have to search for the
      // nearest virtual base class when generating thunks.
      if (updateVBIndex && VBIndex.count(Base) == 0)
        VBIndex[Base] = next_vbindex;
      GenerateVBaseOffsets(offsets, Base, Offset, updateVBIndex, next_vbindex);
    }
  }
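
  // A worked example of the VBIndex value computed above (a sketch, assuming
  // a 64-bit target, so LLVMPointerWidth/8 == 8): the two slots just before
  // the address point hold the RTTI pointer and the offset-to-top, so the
  // first virtual base offset recorded (offsets.size() == 0) lands at
  // -(0*8) - 3*8 == -24 bytes relative to the address point, the next at
  // -32, and so on.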

  void StartNewTable() {
    SeenVBase.clear();
  }

  Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);

  /// getVbaseOffset - Returns the index into the vtable for the virtual base
  /// offset of the given virtual base (B) of the derived class D.
  Index_t getVbaseOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    if (D != Class)
      return VBlookup(D, B);
    llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
    i = VBIndex.find(B);
    if (i != VBIndex.end())
      return i->second;

    assert(false && "FIXME: Base not found");
    return 0;
  }

  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;

    // FIXME: Don't like the nested loops. For very large inheritance
    // hierarchies we could have a table on the side with the final overrider
    // and just replace each instance of an overridden method once. Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax; check covariance.
        if (submethods[i] != om)
          continue;
        QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance.
          Index_t nv = 0;
          if (CovariantThunks.count(OMD)) {
            oret = CovariantThunks[OMD].second;
            CovariantThunks.erase(OMD);
          }
          ReturnOffset = std::make_pair(nv, getVbaseOffset(oret, ret));
        }
        Index[MD] = i;
        submethods[i] = m;

        Thunks.erase(OMD);
        if (MorallyVirtual) {
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = Offset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          // FIXME: calculate the non-virtual offset.
          ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else
            Thunks[MD] = ThisOffset;
          return true;
        }

        // FIXME: finish this off.
        int64_t O = VCallOffset[OMD] - Offset/8;
        if (O || ReturnOffset.first || ReturnOffset.second) {
          CallOffset ThisOffset = std::make_pair(O, 0);

          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else
            Thunks[MD] = ThisOffset;
        }
        return true;
      }
    }

    return false;
  }
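
  // For intuition about the two maps populated above, consider a
  // hypothetical hierarchy (a sketch, not code from this file):
  //
  //   struct A { virtual void f(); };
  //   struct B { virtual B *clone(); };
  //   struct C : A, virtual B { void f(); C *clone(); };
  //
  // C::clone() in the B-in-C sub-vtable needs its 'this' pointer adjusted
  // from B* to C*, and, because the return type is covariant, the returned
  // C* may need adjusting back to B*; such entries go in CovariantThunks,
  // along with the canonical return type used for the adjustment. Overriders
  // that only need the 'this' adjustment go in Thunks.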

  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first.first;
      Index_t v_t = i->second.first.first.second;
      Index_t nv_r = i->second.first.second.first;
      Index_t v_r = i->second.first.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
  }

  void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
                       int64_t> > *Path, bool MorallyVirtual) {
    for (std::vector<std::pair<const CXXRecordDecl *,
           int64_t> >::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t Offset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end();
           mi != me; ++mi) {
        if (!mi->isVirtual())
          continue;

        const CXXMethodDecl *MD = *mi;
        llvm::Constant *m = 0;
        if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
          m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
        else {
          const FunctionProtoType *FPT =
            MD->getType()->getAs<FunctionProtoType>();
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                           FPT->isVariadic());

          m = wrap(CGM.GetAddrOfFunction(MD, Ty));
        }

        OverrideMethod(MD, m, MorallyVirtual, Offset);
      }
    }
  }

  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
    llvm::Constant *m = 0;
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
    else {
      const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
      const llvm::Type *Ty =
        CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                       FPT->isVariadic());

      m = wrap(CGM.GetAddrOfFunction(MD, Ty));
    }

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset))
      return;

    // Otherwise allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one; after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end();
         mi != me; ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        std::vector<std::pair<const CXXRecordDecl *, int64_t> > S;
        S.push_back(std::make_pair(RD, Offset));
        GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
      }
    }
  }

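// D(X) below is a debug switch: commenting the first definition back in
// sprinkles distinctive marker constants (666-673 here, plus the 0?600:0 and
// 0?700:0 toggles nearby) into the emitted vtable so its layout can be
// eyeballed in IR dumps. As checked in, D(X) expands to nothing.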
// #define D(X) do { X; } while (0)
#define D(X)

  void insertVCalls(int InsertionPoint) {
    llvm::Constant *e = 0;
    D(VCalls.insert(VCalls.begin(), 673));
    D(VCalls.push_back(672));
    methods.insert(methods.begin() + InsertionPoint, VCalls.size(), e);
    // The vcalls come first...
    for (std::vector<Index_t>::reverse_iterator i = VCalls.rbegin(),
           e = VCalls.rend();
         i != e; ++i)
      methods[InsertionPoint++] = wrap((0?600:0) + *i);
    VCalls.clear();
  }

  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase) {
    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      D(methods.push_back(wrap(666)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(667)));
    }

    bool DeferVCalls = MorallyVirtual || ForVirtualBase;
    int VCallInsertionPoint = methods.size();
    if (!DeferVCalls)
      insertVCalls(VCallInsertionPoint);

    if (ForVirtualBase) {
      D(methods.push_back(wrap(668)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(669)));
    }

    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset);

    if (ForVirtualBase) {
      D(methods.push_back(wrap(670)));
      // insertVCalls clears VCalls, so count the slots it is about to add
      // before the address point first.
      Index_t NumVCalls = VCalls.size();
      insertVCalls(VCallInsertionPoint);
      AddressPoint += NumVCalls;
      D(methods.push_back(wrap(671)));
    }

    return AddressPoint;
  }
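
  // A worked example of the above (a sketch, not code from this file): for
  // the simplest dynamic class, say 'struct S { virtual void f(); };', the
  // table built here is
  //
  //   methods[0] = wrap(0)    offset to top
  //   methods[1] = rtti       RTTI pointer
  //   methods[2] = &S::f      <-- AddressPoint == 2
  //
  // On a 64-bit target GenerateVtable() below then GEPs 2*8 == 16 bytes into
  // the emitted global, yielding the interior pointer stored into vptrs.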

  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);
  }

  int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                bool MorallyVirtual = false, int64_t Offset = 0,
                                bool ForVirtualBase = false,
                                std::vector<std::pair<const CXXRecordDecl *,
                                int64_t> > *Path = 0) {
    if (!RD->isDynamicClass())
      return 0;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    std::vector<llvm::Constant *> offsets;
    extra = 0;
    GenerateVBaseOffsets(offsets, RD, Offset, !ForVirtualBase, 0);
    if (ForVirtualBase)
      extra = offsets.size();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);

    if (Path)
      OverrideMethods(Path, MorallyVirtual);

    return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
               MorallyVirtual, Offset, ForVirtualBase);
  }

  void GenerateVtableForVBases(const CXXRecordDecl *RD,
                               int64_t Offset = 0,
                               std::vector<std::pair<const CXXRecordDecl *,
                               int64_t> > *Path = 0) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new std::vector<std::pair<const CXXRecordDecl *, int64_t> >;
    }
    // FIXME: We also need to override using all paths to a virtual base;
    // right now, we just process the first path.
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
        StartNewTable();
        VCall.clear();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
        GenerateVtableForBase(Base, true, BaseOffset, true, Path);
      }
      int64_t BaseOffset = Offset;
      if (i->isVirtual())
        BaseOffset = BLayout.getVBaseClassOffset(Base);
      if (Base->getNumVBases())
        GenerateVtableForVBases(Base, BaseOffset, Path);
    }
    Path->pop_back();
    if (alloc)
      delete Path;
  }
};

VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
}

int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
  MD = MD->getCanonicalDecl();

  MethodVtableIndicesTy::iterator I = MethodVtableIndices.find(MD);
  if (I != MethodVtableIndices.end())
    return I->second;

  const CXXRecordDecl *RD = MD->getParent();

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  MethodVtableIndices.insert(b.getIndex().begin(),
                             b.getIndex().end());

  I = MethodVtableIndices.find(MD);
  assert(I != MethodVtableIndices.end() && "Did not find index!");
  return I->second;
}
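
// Illustration only (a sketch of how a caller could consume this index; the
// actual call lowering lives elsewhere): given i = getMethodVtableIndex(MD),
// a virtual call conceptually compiles to
//
//   vtable = *(void***)this;   // load the vptr, which points at the
//                              // vtable's address point
//   fn = vtable[i];            // Index counts pointer-sized slots from there
//   fn(this, ...);             // call through the fetched pointer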

int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *VBase) {
  ClassPairTy ClassPair(RD, VBase);

  VirtualBaseClassIndiciesTy::iterator I =
    VirtualBaseClassIndicies.find(ClassPair);
  if (I != VirtualBaseClassIndicies.end())
    return I->second;

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
         b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
    // Insert all types.
    ClassPairTy ClassPair(RD, I->first);

    VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
  }

  I = VirtualBaseClassIndicies.find(ClassPair);
  assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");

  return I->second;
}

llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCXXVtable(CGM.getMangleContext(), RD, Out);

  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First come the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
                        llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                               AddressPoint*LLVMPointerWidth/8));
  return vtable;
}