blob: c3f6074982e1baca81414b052beb4e87ecac896e [file] [log] [blame]
//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//
13
14#include "CodeGenModule.h"
15#include "CodeGenFunction.h"
16
17#include "clang/AST/RecordLayout.h"
18
19using namespace clang;
20using namespace CodeGen;
21
22class VtableBuilder {
23public:
24 /// Index_t - Vtable index type.
25 typedef uint64_t Index_t;
26private:
27 std::vector<llvm::Constant *> &methods;
28 std::vector<llvm::Constant *> submethods;
29 llvm::Type *Ptr8Ty;
30 /// Class - The most derived class that this vtable is being built for.
31 const CXXRecordDecl *Class;
32 /// BLayout - Layout for the most derived class that this vtable is being
33 /// built for.
34 const ASTRecordLayout &BLayout;
35 llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
36 llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
37 llvm::Constant *rtti;
38 llvm::LLVMContext &VMContext;
39 CodeGenModule &CGM; // Per-module state.
40 /// Index - Maps a method decl into a vtable index. Useful for virtual
41 /// dispatch codegen.
42 llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
43 llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
44 llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
45 llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
46 typedef std::pair<Index_t, Index_t> CallOffset;
47 typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
48 Thunks_t Thunks;
49 typedef llvm::DenseMap<const CXXMethodDecl *,
50 std::pair<CallOffset, CallOffset> > CovariantThunks_t;
51 CovariantThunks_t CovariantThunks;
52 std::vector<Index_t> VCalls;
53 typedef CXXRecordDecl::method_iterator method_iter;
54 // FIXME: Linkage should follow vtable
55 const bool Extern;
56 const uint32_t LLVMPointerWidth;
57 Index_t extra;
58public:
59 VtableBuilder(std::vector<llvm::Constant *> &meth,
60 const CXXRecordDecl *c,
61 CodeGenModule &cgm)
62 : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
63 rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
64 CGM(cgm), Extern(true),
65 LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
66 Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
67 }
68
69 llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
70 llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
71 { return VBIndex; }
72
73 llvm::Constant *wrap(Index_t i) {
74 llvm::Constant *m;
75 m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
76 return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
77 }
78
79 llvm::Constant *wrap(llvm::Constant *m) {
80 return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
81 }
82
83 void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
84 const CXXRecordDecl *RD, uint64_t Offset) {
85 for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
86 e = RD->bases_end(); i != e; ++i) {
87 const CXXRecordDecl *Base =
88 cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
89 if (i->isVirtual() && !SeenVBase.count(Base)) {
90 SeenVBase.insert(Base);
91 int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
92 llvm::Constant *m = wrap(BaseOffset);
93 m = wrap((0?700:0) + BaseOffset);
94 VBIndex[Base] = -(offsets.size()*LLVMPointerWidth/8)
95 - 3*LLVMPointerWidth/8;
96 offsets.push_back(m);
97 }
98 GenerateVBaseOffsets(offsets, Base, Offset);
99 }
100 }
101
102 void StartNewTable() {
103 SeenVBase.clear();
104 }
105
106 Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);
107
108 /// getVbaseOffset - Returns the index into the vtable for the virtual base
109 /// offset for the given (B) virtual base of the derived class D.
110 Index_t getVbaseOffset(QualType qB, QualType qD) {
111 qD = qD->getAs<PointerType>()->getPointeeType();
112 qB = qB->getAs<PointerType>()->getPointeeType();
113 CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
114 CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
115 if (D != Class)
116 return VBlookup(D, B);
117 llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
118 i = VBIndex.find(B);
119 if (i != VBIndex.end())
120 return i->second;
121 // FIXME: temporal botch, is this data here, by the time we need it?
122
123 assert(false && "FIXME: Locate the containing virtual base first");
124 return 0;
125 }
126
127 bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
128 bool MorallyVirtual, Index_t Offset) {
129 typedef CXXMethodDecl::method_iterator meth_iter;
130
131 // FIXME: Don't like the nested loops. For very large inheritance
132 // heirarchies we could have a table on the side with the final overridder
133 // and just replace each instance of an overridden method once. Would be
134 // nice to measure the cost/benefit on real code.
135
136 for (meth_iter mi = MD->begin_overridden_methods(),
137 e = MD->end_overridden_methods();
138 mi != e; ++mi) {
139 const CXXMethodDecl *OMD = *mi;
140 llvm::Constant *om;
141 om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
142 om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);
143
144 for (Index_t i = 0, e = submethods.size();
145 i != e; ++i) {
146 // FIXME: begin_overridden_methods might be too lax, covariance */
147 if (submethods[i] != om)
148 continue;
149 QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
150 CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
151 QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
152 CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
153 CallOffset ReturnOffset = std::make_pair(0, 0);
154 if (oret != ret) {
155 // FIXME: calculate offsets for covariance
156 ReturnOffset = std::make_pair(42,getVbaseOffset(oret, ret));
157 }
158 Index[MD] = i;
159 submethods[i] = m;
160
161 Thunks.erase(OMD);
162 if (MorallyVirtual) {
163 Index_t &idx = VCall[OMD];
164 if (idx == 0) {
165 VCallOffset[MD] = Offset/8;
166 idx = VCalls.size()+1;
167 VCalls.push_back(0);
168 } else {
169 VCallOffset[MD] = VCallOffset[OMD];
170 VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
171 }
172 VCall[MD] = idx;
173 CallOffset ThisOffset;
174 // FIXME: calculate non-virtual offset
175 ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
176 if (ReturnOffset.first || ReturnOffset.second)
177 CovariantThunks[MD] = std::make_pair(ThisOffset, ReturnOffset);
178 else
179 Thunks[MD] = ThisOffset;
180 return true;
181 }
182#if 0
183 // FIXME: finish off
184 int64_t O = VCallOffset[OMD] - Offset/8;
185 if (O) {
186 Thunks[MD] = std::make_pair(O, 0);
187 }
188#endif
189 return true;
190 }
191 }
192
193 return false;
194 }
195
196 void InstallThunks() {
197 for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
198 i != e; ++i) {
199 const CXXMethodDecl *MD = i->first;
200 Index_t idx = Index[MD];
201 Index_t nv_O = i->second.first;
202 Index_t v_O = i->second.second;
203 submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
204 }
205 Thunks.clear();
206 for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
207 e = CovariantThunks.end();
208 i != e; ++i) {
209 const CXXMethodDecl *MD = i->first;
210 Index_t idx = Index[MD];
211 Index_t nv_t = i->second.first.first;
212 Index_t v_t = i->second.first.second;
213 Index_t nv_r = i->second.second.first;
214 Index_t v_r = i->second.second.second;
215 submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
216 v_r);
217 }
218 CovariantThunks.clear();
219 }
220
221 void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
222 int64_t> > *Path, bool MorallyVirtual) {
223 for (std::vector<std::pair<const CXXRecordDecl *,
224 int64_t> >::reverse_iterator i =Path->rbegin(),
225 e = Path->rend(); i != e; ++i) {
226 const CXXRecordDecl *RD = i->first;
227 int64_t Offset = i->second;
228 for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
229 ++mi) {
230 if (!mi->isVirtual())
231 continue;
232
233 const CXXMethodDecl *MD = *mi;
234 llvm::Constant *m = 0;
235 if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
236 m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
237 else {
238 const FunctionProtoType *FPT =
239 MD->getType()->getAs<FunctionProtoType>();
240 const llvm::Type *Ty =
241 CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
242 FPT->isVariadic());
243
244 m = wrap(CGM.GetAddrOfFunction(MD, Ty));
245 }
246
247 OverrideMethod(MD, m, MorallyVirtual, Offset);
248 }
249 }
250 }
251
252 void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
253 llvm::Constant *m = 0;
254 if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
255 m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
256 else {
257 const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
258 const llvm::Type *Ty =
259 CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
260 FPT->isVariadic());
261
262 m = wrap(CGM.GetAddrOfFunction(MD, Ty));
263 }
264
265 // If we can find a previously allocated slot for this, reuse it.
266 if (OverrideMethod(MD, m, MorallyVirtual, Offset))
267 return;
268
269 // else allocate a new slot.
270 Index[MD] = submethods.size();
271 submethods.push_back(m);
272 if (MorallyVirtual) {
273 VCallOffset[MD] = Offset/8;
274 Index_t &idx = VCall[MD];
275 // Allocate the first one, after that, we reuse the previous one.
276 if (idx == 0) {
277 idx = VCalls.size()+1;
278 VCalls.push_back(0);
279 }
280 }
281 }
282
283 void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
284 Index_t Offset) {
285 for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
286 ++mi)
287 if (mi->isVirtual())
288 AddMethod(*mi, MorallyVirtual, Offset);
289 }
290
291 void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
292 const CXXRecordDecl *PrimaryBase,
293 bool PrimaryBaseWasVirtual, bool MorallyVirtual,
294 int64_t Offset) {
295 for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
296 e = RD->bases_end(); i != e; ++i) {
297 if (i->isVirtual())
298 continue;
299 const CXXRecordDecl *Base =
300 cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
301 if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
302 uint64_t o = Offset + Layout.getBaseClassOffset(Base);
303 StartNewTable();
304 std::vector<std::pair<const CXXRecordDecl *,
305 int64_t> > S;
306 S.push_back(std::make_pair(RD, Offset));
307 GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
308 }
309 }
310 }
311
312 Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
313 const ASTRecordLayout &Layout,
314 const CXXRecordDecl *PrimaryBase,
315 bool PrimaryBaseWasVirtual, bool MorallyVirtual,
316 int64_t Offset, bool ForVirtualBase) {
317 StartNewTable();
318 extra = 0;
319 // FIXME: Cleanup.
320 if (!ForVirtualBase) {
321 // then virtual base offsets...
322 for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
323 e = offsets.rend(); i != e; ++i)
324 methods.push_back(*i);
325 }
326
327 // The vcalls come first...
328 for (std::vector<Index_t>::reverse_iterator i=VCalls.rbegin(),
329 e=VCalls.rend();
330 i != e; ++i)
331 methods.push_back(wrap((0?600:0) + *i));
332 VCalls.clear();
333
334 if (ForVirtualBase) {
335 // then virtual base offsets...
336 for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
337 e = offsets.rend(); i != e; ++i)
338 methods.push_back(*i);
339 }
340
341 methods.push_back(wrap(-(Offset/8)));
342 methods.push_back(rtti);
343 Index_t AddressPoint = methods.size();
344
345 InstallThunks();
346 methods.insert(methods.end(), submethods.begin(), submethods.end());
347 submethods.clear();
348
349 // and then the non-virtual bases.
350 NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
351 MorallyVirtual, Offset);
352 return AddressPoint;
353 }
354
355 void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
356 if (!RD->isDynamicClass())
357 return;
358
359 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
360 const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
361 const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
362
363 // vtables are composed from the chain of primaries.
364 if (PrimaryBase) {
365 if (PrimaryBaseWasVirtual)
366 IndirectPrimary.insert(PrimaryBase);
367 Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
368 }
369
370 // And add the virtuals for the class to the primary vtable.
371 AddMethods(RD, MorallyVirtual, Offset);
372 }
373
374 int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
375 bool MorallyVirtual = false, int64_t Offset = 0,
376 bool ForVirtualBase = false,
377 std::vector<std::pair<const CXXRecordDecl *,
378 int64_t> > *Path = 0) {
379 if (!RD->isDynamicClass())
380 return 0;
381
382 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
383 const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
384 const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
385
386 std::vector<llvm::Constant *> offsets;
387 extra = 0;
388 GenerateVBaseOffsets(offsets, RD, Offset);
389 if (ForVirtualBase)
390 extra = offsets.size();
391
392 // vtables are composed from the chain of primaries.
393 if (PrimaryBase) {
394 if (PrimaryBaseWasVirtual)
395 IndirectPrimary.insert(PrimaryBase);
396 Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
397 }
398
399 // And add the virtuals for the class to the primary vtable.
400 AddMethods(RD, MorallyVirtual, Offset);
401
402 if (Path)
403 OverrideMethods(Path, MorallyVirtual);
404
405 return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
406 MorallyVirtual, Offset, ForVirtualBase);
407 }
408
409 void GenerateVtableForVBases(const CXXRecordDecl *RD,
410 int64_t Offset = 0,
411 std::vector<std::pair<const CXXRecordDecl *,
412 int64_t> > *Path = 0) {
413 bool alloc = false;
414 if (Path == 0) {
415 alloc = true;
416 Path = new std::vector<std::pair<const CXXRecordDecl *,
417 int64_t> >;
418 }
419 // FIXME: We also need to override using all paths to a virtual base,
420 // right now, we just process the first path
421 Path->push_back(std::make_pair(RD, Offset));
422 for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
423 e = RD->bases_end(); i != e; ++i) {
424 const CXXRecordDecl *Base =
425 cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
426 if (i->isVirtual() && !IndirectPrimary.count(Base)) {
427 // Mark it so we don't output it twice.
428 IndirectPrimary.insert(Base);
429 StartNewTable();
430 int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
431 GenerateVtableForBase(Base, true, BaseOffset, true, Path);
432 }
433 int64_t BaseOffset = Offset;
434 if (i->isVirtual())
435 BaseOffset = BLayout.getVBaseClassOffset(Base);
436 if (Base->getNumVBases())
437 GenerateVtableForVBases(Base, BaseOffset, Path);
438 }
439 Path->pop_back();
440 if (alloc)
441 delete Path;
442 }
443};
444
445
446VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
447 CXXRecordDecl *B) {
448 return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
449}
450
451int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
452 MD = MD->getCanonicalDecl();
453
454 MethodVtableIndicesTy::iterator I = MethodVtableIndices.find(MD);
455 if (I != MethodVtableIndices.end())
456 return I->second;
457
458 const CXXRecordDecl *RD = MD->getParent();
459
460 std::vector<llvm::Constant *> methods;
461 // FIXME: This seems expensive. Can we do a partial job to get
462 // just this data.
463 VtableBuilder b(methods, RD, CGM);
464 b.GenerateVtableForBase(RD);
465 b.GenerateVtableForVBases(RD);
466
467 MethodVtableIndices.insert(b.getIndex().begin(),
468 b.getIndex().end());
469
470 I = MethodVtableIndices.find(MD);
471 assert(I != MethodVtableIndices.end() && "Did not find index!");
472 return I->second;
473}
474
475int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
476 const CXXRecordDecl *VBase) {
477 ClassPairTy ClassPair(RD, VBase);
478
479 VirtualBaseClassIndiciesTy::iterator I =
480 VirtualBaseClassIndicies.find(ClassPair);
481 if (I != VirtualBaseClassIndicies.end())
482 return I->second;
483
484 std::vector<llvm::Constant *> methods;
485 // FIXME: This seems expensive. Can we do a partial job to get
486 // just this data.
487 VtableBuilder b(methods, RD, CGM);
488 b.GenerateVtableForBase(RD);
489 b.GenerateVtableForVBases(RD);
490
491 for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
492 b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
493 // Insert all types.
494 ClassPairTy ClassPair(RD, I->first);
495
496 VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
497 }
498
499 I = VirtualBaseClassIndicies.find(ClassPair);
500 assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");
501
502 return I->second;
503}
504
505llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
506 llvm::SmallString<256> OutName;
507 llvm::raw_svector_ostream Out(OutName);
Anders Carlssondbd920c2009-10-11 22:13:54 +0000508 mangleCXXVtable(CGM.getMangleContext(), RD, Out);
Benjamin Kramer7a9474e2009-10-11 22:57:54 +0000509
Anders Carlssondbd920c2009-10-11 22:13:54 +0000510 llvm::GlobalVariable::LinkageTypes linktype;
511 linktype = llvm::GlobalValue::WeakAnyLinkage;
512 std::vector<llvm::Constant *> methods;
513 llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
514 int64_t AddressPoint;
515
516 VtableBuilder b(methods, RD, CGM);
517
518 // First comes the vtables for all the non-virtual bases...
519 AddressPoint = b.GenerateVtableForBase(RD);
520
521 // then the vtables for all the virtual bases.
522 b.GenerateVtableForVBases(RD);
523
524 llvm::Constant *C;
525 llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
526 C = llvm::ConstantArray::get(type, methods);
527 llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
528 linktype, C, Out.str());
529 vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
530 vtable = Builder.CreateGEP(vtable,
531 llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
532 AddressPoint*LLVMPointerWidth/8));
533 return vtable;
534}