//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenModule.h"
#include "CodeGenFunction.h"

#include "clang/AST/RecordLayout.h"

using namespace clang;
using namespace CodeGen;

class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  std::vector<llvm::Constant *> &methods;
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index. Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;

  typedef llvm::DenseMap<const CXXMethodDecl *, int> Pures_t;
  Pures_t Pures;
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<std::pair<CallOffset, CallOffset>,
                                   CanQualType> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
  int CurrentVBaseOffset;
  typedef std::vector<std::pair<const CXXRecordDecl *, int64_t> > Path_t;
  llvm::Constant *cxa_pure;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)),
      CurrentVBaseOffset(0) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);

    // Calculate pointer for __cxa_pure_virtual.
    const llvm::FunctionType *FTy;
    std::vector<const llvm::Type*> ArgTys;
    const llvm::Type *ResultType = llvm::Type::getVoidTy(VMContext);
    FTy = llvm::FunctionType::get(ResultType, ArgTys, false);
    cxa_pure = wrap(CGM.CreateRuntimeFunction(FTy, "__cxa_pure_virtual"));
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
  llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
    { return VBIndex; }

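  // Helpers that coerce vtable components to the common i8* element type:
  // integer entries (offset-to-top, vbase and vcall offsets) are wrapped with
  // an inttoptr, and function/RTTI pointers are bitcast.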
  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset,
                            bool updateVBIndex, Index_t current_vbindex) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      Index_t next_vbindex = current_vbindex;
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap(BaseOffset);
        m = wrap((0?700:0) + BaseOffset);
        if (updateVBIndex) {
          next_vbindex = (ssize_t)(-(offsets.size()*LLVMPointerWidth/8)
                                   - 3*LLVMPointerWidth/8);
          VBIndex[Base] = next_vbindex;
        }
        offsets.push_back(m);
      }
      // We also record offsets for non-virtual bases to the closest enclosing
      // virtual base. We do this so that we don't have to search
      // for the nearest virtual base class when generating thunks.
      if (updateVBIndex && VBIndex.count(Base) == 0)
        VBIndex[Base] = next_vbindex;
      GenerateVBaseOffsets(offsets, Base, Offset, updateVBIndex, next_vbindex);
    }
  }

  void StartNewTable() {
    SeenVBase.clear();
  }

  Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);

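  /// getNVOffset_1 - Recursive helper for getNVOffset: searches the bases of D
  /// for B, accumulating only non-virtual base offsets along the way, and
  /// returns -1 if B is not found.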
  Index_t getNVOffset_1(const CXXRecordDecl *D, const CXXRecordDecl *B,
                        Index_t Offset = 0) {

    if (B == D)
      return Offset;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(D);
    for (CXXRecordDecl::base_class_const_iterator i = D->bases_begin(),
           e = D->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      int64_t BaseOffset = 0;
      if (!i->isVirtual())
        BaseOffset = Offset + Layout.getBaseClassOffset(Base);
      int64_t o = getNVOffset_1(Base, B, BaseOffset);
      if (o >= 0)
        return o;
    }

    return -1;
  }

  /// getNVOffset - Returns the non-virtual offset for the given (B) base of the
  /// derived class D.
  Index_t getNVOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    int64_t o = getNVOffset_1(D, B);
    if (o >= 0)
      return o;

    assert(false && "FIXME: non-virtual base not found");
    return 0;
  }

  /// getVbaseOffset - Returns the index into the vtable for the virtual base
  /// offset for the given (B) virtual base of the derived class D.
  Index_t getVbaseOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    if (D != Class)
      return VBlookup(D, B);
    llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
    i = VBIndex.find(B);
    if (i != VBIndex.end())
      return i->second;

    assert(false && "FIXME: Base not found");
    return 0;
  }

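  /// OverrideMethod - If MD overrides a method that already has a slot in the
  /// vtable being built, reuse that slot for MD and record any this-pointer
  /// adjustment, covariant-return adjustment, or vcall offset the override
  /// requires. Returns true if an existing slot was reused, false if the
  /// caller should allocate a new one.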
  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t OverrideOffset,
                      Index_t Offset) {
    const bool isPure = MD->isPure();
    typedef CXXMethodDecl::method_iterator meth_iter;
    // FIXME: Should OverrideOffset's be Offset?

    // FIXME: Don't like the nested loops. For very large inheritance
    // hierarchies we could have a table on the side with the final overrider
    // and just replace each instance of an overridden method once. Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax, covariance
        if (submethods[i] != om)
          continue;
        QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance
          if (CovariantThunks.count(OMD)) {
            oret = CovariantThunks[OMD].second;
            CovariantThunks.erase(OMD);
          }
          // FIXME: Double check oret
          Index_t nv = getNVOffset(oret, ret)/8;
          ReturnOffset = std::make_pair(nv, getVbaseOffset(oret, ret));
        }
        Index[MD] = i;
        submethods[i] = m;
        if (isPure)
          Pures[MD] = 1;
        Pures.erase(OMD);
        Thunks.erase(OMD);
        if (MorallyVirtual) {
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = OverrideOffset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + OverrideOffset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          ThisOffset = std::make_pair(CurrentVBaseOffset/8 - Offset/8,
                                      -((idx+extra+2)*LLVMPointerWidth/8));
          // FIXME: Do we always have to build a covariant thunk to save oret,
          // which is the containing virtual base class?
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else if (!isPure)
            Thunks[MD] = ThisOffset;
          return true;
        }

        // FIXME: finish off
        int64_t O = VCallOffset[OMD] - OverrideOffset/8;
        // int64_t O = CurrentVBaseOffset/8 - OverrideOffset/8;
        if (O || ReturnOffset.first || ReturnOffset.second) {
          CallOffset ThisOffset = std::make_pair(O, 0);

          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(std::make_pair(ThisOffset,
                                                                ReturnOffset),
                                                 oret);
          else if (!isPure)
            Thunks[MD] = ThisOffset;
        }
        return true;
      }
    }

    return false;
  }

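  /// InstallThunks - Patch the slots collected above: methods that need a
  /// this-pointer or covariant-return adjustment get a freshly built thunk,
  /// and pure virtual methods get __cxa_pure_virtual.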
  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      assert(!MD->isPure() && "Trying to thunk a pure");
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      if (MD->isPure())
        continue;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first.first;
      Index_t v_t = i->second.first.first.second;
      Index_t nv_r = i->second.first.second.first;
      Index_t v_r = i->second.first.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
    for (Pures_t::iterator i = Pures.begin(), e = Pures.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      submethods[idx] = cxa_pure;
    }
    Pures.clear();
  }

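  /// WrapAddrOf - Return the address of MD as an i8*; destructor entries use
  /// the complete-object destructor variant.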
  llvm::Constant *WrapAddrOf(const CXXMethodDecl *MD) {
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      return wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));

    const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
    const llvm::Type *Ty =
      CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                     FPT->isVariadic());

    return wrap(CGM.GetAddrOfFunction(MD, Ty));
  }

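  /// OverrideMethods - Replay OverrideMethod for every virtual method of every
  /// class on Path, walking from the innermost base back out to the most
  /// derived class, so the final overrider ends up in the slots of the
  /// sub-vtable currently being built.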
  void OverrideMethods(Path_t *Path, bool MorallyVirtual, int64_t Offset) {
    for (Path_t::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t OverrideOffset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
           ++mi) {
        if (!mi->isVirtual())
          continue;

        const CXXMethodDecl *MD = *mi;
        llvm::Constant *m = WrapAddrOf(MD);
        OverrideMethod(MD, m, MorallyVirtual, OverrideOffset, Offset);
      }
    }
  }

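  /// AddMethod - Give MD a slot in the current sub-vtable: reuse the slot of a
  /// method it overrides when possible, otherwise append a new one; also
  /// reserve a vcall offset slot when the method is (morally) virtual through
  /// a virtual base.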
  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
    llvm::Constant *m = WrapAddrOf(MD);

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset, Offset))
      return;

    // else allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MD->isPure())
      Pures[MD] = 1;
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one, after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

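  /// NonVirtualBases - Emit a secondary vtable for every non-virtual,
  /// non-primary base of RD at its offset in the complete object, passing Path
  /// along so the overriders from the more derived classes get installed into
  /// those tables.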
  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset, Path_t *Path) {
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        CurrentVBaseOffset = Offset;
        GenerateVtableForBase(Base, MorallyVirtual, o, false, Path);
      }
    }
    Path->pop_back();
  }

// #define D(X) do { X; } while (0)
#define D(X)

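  /// insertVCalls - Splice the accumulated vcall offsets into the vtable at
  /// InsertionPoint (written in reverse order, so VCalls[0] is the last entry
  /// laid down) and clear VCalls for the next sub-vtable.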
  void insertVCalls(int InsertionPoint) {
    llvm::Constant *e = 0;
    D(VCalls.insert(VCalls.begin(), 673));
    D(VCalls.push_back(672));
    methods.insert(methods.begin() + InsertionPoint, VCalls.size(), e);
    // The vcalls come first...
    for (std::vector<Index_t>::reverse_iterator i = VCalls.rbegin(),
           e = VCalls.rend();
         i != e; ++i)
      methods[InsertionPoint++] = wrap((0?600:0) + *i);
    VCalls.clear();
  }

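  /// end - Finish the vtable for the sub-object at Offset: lay down the vbase
  /// and vcall offset entries, then the offset-to-top and RTTI entries, then
  /// the virtual function pointers, and finally secondary vtables for the
  /// non-virtual bases. Returns the address point, the index just past the
  /// RTTI entry.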
  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase, Path_t *Path) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new Path_t;
    }

    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      D(methods.push_back(wrap(666)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(667)));
    }

    bool DeferVCalls = MorallyVirtual || ForVirtualBase;
    int VCallInsertionPoint = methods.size();
    if (!DeferVCalls) {
      insertVCalls(VCallInsertionPoint);
    } else
      // FIXME: just for extra, or for all uses of VCalls.size post this?
      extra = -VCalls.size();

    if (ForVirtualBase) {
      D(methods.push_back(wrap(668)));
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
      D(methods.push_back(wrap(669)));
    }

    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset, Path);

    if (ForVirtualBase) {
      D(methods.push_back(wrap(670)));
      // The deferred vcalls are spliced in before the address point, shifting
      // it; account for them before insertVCalls clears VCalls.
      AddressPoint += VCalls.size();
      insertVCalls(VCallInsertionPoint);
      D(methods.push_back(wrap(671)));
    }

    if (alloc) {
      delete Path;
    }
    return AddressPoint;
  }

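  /// Primaries - Add the virtual methods of RD and of its chain of primary
  /// bases to the primary vtable; a primary base shares the vtable of the
  /// class that contains it, so its virtuals go into the same table. Virtual
  /// primary bases are remembered in IndirectPrimary so they are not emitted
  /// again later.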
  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);
  }

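  /// GenerateVtableForBase - Build the vtable for the sub-object of type RD
  /// located at Offset within the complete object, returning the index of its
  /// address point in the overall methods array.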
  int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                bool MorallyVirtual = false, int64_t Offset = 0,
                                bool ForVirtualBase = false,
                                Path_t *Path = 0) {
    if (!RD->isDynamicClass())
      return 0;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    std::vector<llvm::Constant *> offsets;
    extra = 0;
    GenerateVBaseOffsets(offsets, RD, Offset, !ForVirtualBase, 0);
    if (ForVirtualBase)
      extra = offsets.size();

    Primaries(RD, MorallyVirtual, Offset);

    if (Path)
      OverrideMethods(Path, MorallyVirtual, Offset);

    return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
               MorallyVirtual, Offset, ForVirtualBase, Path);
  }

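  /// GenerateVtableForVBases - After the primary vtable has been emitted, emit
  /// a vtable for every virtual base that was not already covered as an
  /// indirect primary base, recursing so that virtual bases of bases are
  /// handled as well.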
  void GenerateVtableForVBases(const CXXRecordDecl *RD,
                               int64_t Offset = 0,
                               Path_t *Path = 0) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new Path_t;
    }
    // FIXME: We also need to override using all paths to a virtual base;
    // right now, we just process the first path.
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
        StartNewTable();
        VCall.clear();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
        CurrentVBaseOffset = BaseOffset;
        GenerateVtableForBase(Base, true, BaseOffset, true, Path);
      }
      int64_t BaseOffset = Offset;
      if (i->isVirtual())
        BaseOffset = BLayout.getVBaseClassOffset(Base);
      if (Base->getNumVBases()) {
        CurrentVBaseOffset = BaseOffset;
        GenerateVtableForVBases(Base, BaseOffset, Path);
      }
    }
    Path->pop_back();
    if (alloc)
      delete Path;
  }
};


VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
}

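/// getMethodVtableIndex - Compute (and cache) the vtable slot of MD by
/// building the complete vtable layout for its class and recording every
/// method index that layout produces.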
int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
  MD = MD->getCanonicalDecl();

  MethodVtableIndicesTy::iterator I = MethodVtableIndices.find(MD);
  if (I != MethodVtableIndices.end())
    return I->second;

  const CXXRecordDecl *RD = MD->getParent();

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  MethodVtableIndices.insert(b.getIndex().begin(),
                             b.getIndex().end());

  I = MethodVtableIndices.find(MD);
  assert(I != MethodVtableIndices.end() && "Did not find index!");
  return I->second;
}

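/// getVirtualBaseOffsetIndex - Return the cached position (as recorded by
/// VtableBuilder in VBIndex, a negative offset back from the vtable's address
/// point) of the virtual base offset entry for VBase in RD's vtable, building
/// the layout for RD on first use.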
int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *VBase) {
  ClassPairTy ClassPair(RD, VBase);

  VirtualBaseClassIndiciesTy::iterator I =
    VirtualBaseClassIndicies.find(ClassPair);
  if (I != VirtualBaseClassIndicies.end())
    return I->second;

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data?
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
         b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
    // Insert all types.
    ClassPairTy ClassPair(RD, I->first);

    VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
  }

  I = VirtualBaseClassIndicies.find(ClassPair);
  assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");

  return I->second;
}

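/// GenerateVtable - Emit the complete vtable for RD as a linkonce_odr global
/// of i8* entries (the primary table followed by the tables for its virtual
/// bases) and return a pointer biased to the primary address point, suitable
/// for storing into the object's vptr.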
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCXXVtable(CGM.getMangleContext(), RD, Out);

  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::LinkOnceODRLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First come the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
                             llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                                    AddressPoint*LLVMPointerWidth/8));
  return vtable;
}