//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenModule.h"
#include "CodeGenFunction.h"

#include "clang/AST/RecordLayout.h"

using namespace clang;
using namespace CodeGen;

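// The VtableBuilder below accumulates each vtable as a flat list of i8*
// constants ("methods").  A rough sketch of the layout it produces per table
// (assembled in end(); this mirrors the Itanium C++ ABI ordering):
//
//   [virtual call offsets]   (when present)
//   [virtual base offsets]
//   offset-to-top
//   RTTI pointer
//   <-- address point returned by end()/GenerateVtableForBase()
//   virtual function pointers and thunks
//
// Negative values recorded in VBIndex and in the thunk maps are byte offsets
// relative to that address point.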
class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  std::vector<llvm::Constant *> &methods;
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index.  Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<std::pair<CallOffset, CallOffset>,
                                   CanQualType> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
  int CurrentVBaseOffset;
  typedef std::vector<std::pair<const CXXRecordDecl *, int64_t> > Path_t;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)),
      CurrentVBaseOffset(0) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
  llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
    { return VBIndex; }

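  // Every slot is emitted as an i8*: integer entries (base offsets, the
  // offset-to-top, vcall offsets) go through inttoptr and function pointers
  // are bitcast, so "methods" stays a homogeneous array that can be turned
  // into a single [N x i8*] global at the end.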
  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset,
                            bool updateVBIndex, Index_t current_vbindex) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      Index_t next_vbindex = current_vbindex;
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap(BaseOffset);
        m = wrap((0?700:0) + BaseOffset);
        if (updateVBIndex) {
          next_vbindex = (ssize_t)(-(offsets.size()*LLVMPointerWidth/8)
                                   - 3*LLVMPointerWidth/8);
          VBIndex[Base] = next_vbindex;
        }
        offsets.push_back(m);
      }
      // We also record offsets from non-virtual bases to the closest enclosing
      // virtual base, so that we don't have to search for the nearest virtual
      // base class when generating thunks.
      if (updateVBIndex && VBIndex.count(Base) == 0)
        VBIndex[Base] = next_vbindex;
      GenerateVBaseOffsets(offsets, Base, Offset, updateVBIndex, next_vbindex);
    }
  }

  void StartNewTable() {
    SeenVBase.clear();
  }

  Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);

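  /// getNVOffset_1 - Recursive helper for getNVOffset.  Searches the bases of
  /// D for B, accumulating non-virtual base offsets (in bits) along the way,
  /// and returns -1 if B is not found.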
  Index_t getNVOffset_1(const CXXRecordDecl *D, const CXXRecordDecl *B,
                        Index_t Offset = 0) {
    if (B == D)
      return Offset;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(D);
    for (CXXRecordDecl::base_class_const_iterator i = D->bases_begin(),
           e = D->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      int64_t BaseOffset = 0;
      if (!i->isVirtual())
        BaseOffset = Offset + Layout.getBaseClassOffset(Base);
      int64_t o = getNVOffset_1(Base, B, BaseOffset);
      if (o >= 0)
        return o;
    }

    return -1;
  }

  /// getNVOffset - Returns the non-virtual offset (in bits) of the base B
  /// within the derived class D.
  Index_t getNVOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    int64_t o = getNVOffset_1(D, B);
    if (o >= 0)
      return o;

    assert(false && "FIXME: non-virtual base not found");
    return 0;
  }

  /// getVbaseOffset - Returns the index into the vtable for the virtual base
  /// offset for the given virtual base (B) of the derived class D.
  Index_t getVbaseOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    if (D != Class)
      return VBlookup(D, B);
    llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
    i = VBIndex.find(B);
    if (i != VBIndex.end())
      return i->second;

    assert(false && "FIXME: Base not found");
    return 0;
  }

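  // OverrideMethod - If MD overrides a method that already occupies a slot in
  // submethods, reuse that slot and record the 'this' (and, for covariant
  // returns, return value) adjustment a thunk would need.  Adjustments are
  // CallOffset pairs of (non-virtual byte offset, virtual byte offset into the
  // vtable); a zero virtual component means no vtable-based adjustment.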
  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t OverrideOffset,
                      Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;
    // FIXME: Should OverrideOffset's be Offset?

    // FIXME: Don't like the nested loops.  For very large inheritance
    // hierarchies we could have a table on the side with the final overrider
    // and just replace each instance of an overridden method once.  Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax; covariance.
        if (submethods[i] != om)
          continue;
        QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance
          if (CovariantThunks.count(OMD)) {
            oret = CovariantThunks[OMD].second;
            CovariantThunks.erase(OMD);
          }
          // FIXME: Double check oret
          Index_t nv = getNVOffset(oret, ret)/8;
          ReturnOffset = std::make_pair(nv, getVbaseOffset(oret, ret));
        }
        Index[MD] = i;
        submethods[i] = m;

        Thunks.erase(OMD);
        if (MorallyVirtual) {
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = OverrideOffset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + OverrideOffset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          ThisOffset = std::make_pair(CurrentVBaseOffset/8 - Offset/8,
                                      -((idx+extra+2)*LLVMPointerWidth/8));
          // FIXME: Do we always have to build a covariant thunk to save oret,
          // which is the containing virtual base class?
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else
            Thunks[MD] = ThisOffset;
          return true;
        }

        // FIXME: finish off
        int64_t O = VCallOffset[OMD] - OverrideOffset/8;
        // int64_t O = CurrentVBaseOffset/8 - OverrideOffset/8;
        if (O || ReturnOffset.first || ReturnOffset.second) {
          CallOffset ThisOffset = std::make_pair(O, 0);

          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else
            Thunks[MD] = ThisOffset;
        }
        return true;
      }
    }

    return false;
  }

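  // InstallThunks - Overwrite the slots of methods that still need an
  // adjustment with thunks built by CodeGenModule (BuildThunk for plain
  // 'this' adjustments, BuildCovariantThunk when the return value must be
  // adjusted too), then drop the pending-thunk maps.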
  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first.first;
      Index_t v_t = i->second.first.first.second;
      Index_t nv_r = i->second.first.second.first;
      Index_t v_r = i->second.first.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
  }

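  // OverrideMethods - Replay the virtual methods of every class recorded on
  // Path against the table being built; iterating the path in reverse means
  // overriders from more-derived classes are applied last and win.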
  void OverrideMethods(Path_t *Path, bool MorallyVirtual, int64_t Offset) {
    for (Path_t::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t OverrideOffset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
           ++mi) {
        if (!mi->isVirtual())
          continue;

        const CXXMethodDecl *MD = *mi;
        llvm::Constant *m = 0;
        if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
          m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
        else {
          const FunctionProtoType *FPT =
            MD->getType()->getAs<FunctionProtoType>();
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                           FPT->isVariadic());

          m = wrap(CGM.GetAddrOfFunction(MD, Ty));
        }

        OverrideMethod(MD, m, MorallyVirtual, OverrideOffset, Offset);
      }
    }
  }

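  // AddMethod - Add one virtual method to the current table: reuse an
  // overridden slot if OverrideMethod finds one, otherwise append a new slot
  // and, for morally virtual bases, reserve a matching vcall entry.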
  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
    llvm::Constant *m = 0;
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
    else {
      const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
      const llvm::Type *Ty =
        CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                       FPT->isVariadic());

      m = wrap(CGM.GetAddrOfFunction(MD, Ty));
    }

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset, Offset))
      return;

    // Otherwise allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one; after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset, Path_t *Path) {
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        CurrentVBaseOffset = Offset;
        GenerateVtableForBase(Base, MorallyVirtual, o, false, Path);
      }
    }
    Path->pop_back();
  }

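// D() is a debugging aid: enabling the commented-out definition below sprinkles
// recognizable marker values (666-673) into the table so the emitted vtable can
// be picked apart by eye in the LLVM IR.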
// #define D(X) do { X; } while (0)
#define D(X)

  void insertVCalls(int InsertionPoint) {
    llvm::Constant *e = 0;
    D(VCalls.insert(VCalls.begin(), 673));
    D(VCalls.push_back(672));
    methods.insert(methods.begin() + InsertionPoint, VCalls.size(), e);
    // The vcalls come first...
    for (std::vector<Index_t>::reverse_iterator i = VCalls.rbegin(),
           e = VCalls.rend();
         i != e; ++i)
      methods[InsertionPoint++] = wrap((0?600:0) + *i);
    VCalls.clear();
  }

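  // end - Finish off the current vtable: lay down the virtual base offsets,
  // the (possibly deferred) vcall offsets, offset-to-top and RTTI, install the
  // collected thunks and method pointers, then recurse into the non-virtual
  // bases for their secondary tables.  Returns the index of the address point,
  // i.e. the slot a vtable pointer for this class will point at.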
  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase, Path_t *Path) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new Path_t;
    }

    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      D(methods.push_back(wrap(666)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(667)));
    }

    bool DeferVCalls = MorallyVirtual || ForVirtualBase;
    int VCallInsertionPoint = methods.size();
    if (!DeferVCalls) {
      insertVCalls(VCallInsertionPoint);
    } else
      // FIXME: just for extra, or for all uses of VCalls.size post this?
      extra = -VCalls.size();

    if (ForVirtualBase) {
      D(methods.push_back(wrap(668)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(669)));
    }

    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset, Path);

    if (ForVirtualBase) {
      D(methods.push_back(wrap(670)));
      insertVCalls(VCallInsertionPoint);
      AddressPoint += VCalls.size();
      D(methods.push_back(wrap(671)));
    }

    if (alloc) {
      delete Path;
    }
    return AddressPoint;
  }

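  // Primaries - Recursively add the virtual methods of the primary base chain
  // to the current table; a class shares its address point with its primary
  // base, so all of their methods land in the same table.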
  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);
  }

  int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                bool MorallyVirtual = false, int64_t Offset = 0,
                                bool ForVirtualBase = false,
                                Path_t *Path = 0) {
    if (!RD->isDynamicClass())
      return 0;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    std::vector<llvm::Constant *> offsets;
    extra = 0;
    GenerateVBaseOffsets(offsets, RD, Offset, !ForVirtualBase, 0);
    if (ForVirtualBase)
      extra = offsets.size();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);

    if (Path)
      OverrideMethods(Path, MorallyVirtual, Offset);

    return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
               MorallyVirtual, Offset, ForVirtualBase, Path);
  }

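  // GenerateVtableForVBases - After the primary table, emit a table for each
  // virtual base that has not already been emitted as an (indirect) primary
  // base, recursing so that virtual bases of bases are covered as well.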
  void GenerateVtableForVBases(const CXXRecordDecl *RD,
                               int64_t Offset = 0,
                               Path_t *Path = 0) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new Path_t;
    }
    // FIXME: We also need to override using all paths to a virtual base;
    // right now we just process the first path.
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
        StartNewTable();
        VCall.clear();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
        CurrentVBaseOffset = BaseOffset;
        GenerateVtableForBase(Base, true, BaseOffset, true, Path);
      }
      int64_t BaseOffset = Offset;
      if (i->isVirtual())
        BaseOffset = BLayout.getVBaseClassOffset(Base);
      if (Base->getNumVBases()) {
        CurrentVBaseOffset = BaseOffset;
        GenerateVtableForVBases(Base, BaseOffset, Path);
      }
    }
    Path->pop_back();
    if (alloc)
      delete Path;
  }
};


VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
}

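// getMethodVtableIndex returns the slot of MD relative to the vtable's address
// point.  Virtual-call codegen can consume it roughly like this (a sketch, not
// the actual call-emission code, which lives elsewhere in CodeGen):
//
//   vtable = load the object's vptr
//   callee = load *(vtable + Index * PointerWidthInBytes)
//   call callee(this, args...)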
int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
  MD = MD->getCanonicalDecl();

  MethodVtableIndicesTy::iterator I = MethodVtableIndices.find(MD);
  if (I != MethodVtableIndices.end())
    return I->second;

  const CXXRecordDecl *RD = MD->getParent();

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive.  Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  MethodVtableIndices.insert(b.getIndex().begin(),
                             b.getIndex().end());

  I = MethodVtableIndices.find(MD);
  assert(I != MethodVtableIndices.end() && "Did not find index!");
  return I->second;
}

int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *VBase) {
  ClassPairTy ClassPair(RD, VBase);

  VirtualBaseClassIndiciesTy::iterator I =
    VirtualBaseClassIndicies.find(ClassPair);
  if (I != VirtualBaseClassIndicies.end())
    return I->second;

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive.  Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
         b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
    // Insert all types.
    ClassPairTy ClassPair(RD, I->first);

    VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
  }

  I = VirtualBaseClassIndicies.find(ClassPair);
  assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");

  return I->second;
}

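// GenerateVtable - Emit the whole vtable group for RD as one linkonce_odr
// [N x i8*] global (the tables for the non-virtual bases first, then those for
// the virtual bases) and return a pointer adjusted to RD's primary address
// point, suitable for storing into an object's vptr.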
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCXXVtable(CGM.getMangleContext(), RD, Out);

  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::LinkOnceODRLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First come the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // ...then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
                     llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                            AddressPoint*LLVMPointerWidth/8));
  return vtable;
}