//===--- MicrosoftVBTables.cpp - Virtual Base Table Emission --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This class generates data about MSVC virtual base tables.
//
//===----------------------------------------------------------------------===//
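//
// A vbtable is an array of offsets, all measured from the vbptr it describes:
// the first entry points back to the start of the subobject holding the vbptr,
// and each subsequent entry gives the displacement to one of the class's
// virtual bases, in vbase iteration order.  VBTableBuilder decides how many
// vbtables a class needs and how to name them; VBTableInfo emits their
// contents.
//
//===----------------------------------------------------------------------===//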

#include "MicrosoftVBTables.h"
#include "CodeGenModule.h"
#include "CGCXXABI.h"

namespace clang {
namespace CodeGen {

/// Holds intermediate data about a path to a vbptr inside a base subobject.
struct VBTablePath {
  VBTablePath(const VBTableInfo &VBInfo)
    : VBInfo(VBInfo), NextBase(VBInfo.VBPtrSubobject.getBase()) { }

  /// All the data needed to build a vbtable, minus the GlobalVariable whose
  /// name we haven't computed yet.
  VBTableInfo VBInfo;

  /// Next base to use for disambiguation.  Can be null if we've already
  /// disambiguated this path once.
  const CXXRecordDecl *NextBase;

  /// Path is not really a full path like a CXXBasePath.  It holds the subset
  /// of records that need to be mangled into the vbtable symbol name in order
  /// to get a unique name.
  llvm::SmallVector<const CXXRecordDecl *, 1> Path;
};

VBTableBuilder::VBTableBuilder(CodeGenModule &CGM,
                               const CXXRecordDecl *MostDerived)
    : CGM(CGM), MostDerived(MostDerived),
      DerivedLayout(CGM.getContext().getASTRecordLayout(MostDerived)) {}

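/// Collects one VBTableInfo per vbptr reachable from MostDerived: it finds the
/// unambiguous paths, creates a mangled vbtable global for each one via
/// getAddrOfVBTable, and appends the results to VBTables.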
void VBTableBuilder::enumerateVBTables(VBTableVector &VBTables) {
  VBTablePathVector Paths;
  findUnambiguousPaths(MostDerived, BaseSubobject(MostDerived,
                                                  CharUnits::Zero()), Paths);
  for (VBTablePathVector::iterator I = Paths.begin(), E = Paths.end();
       I != E; ++I) {
    VBTablePath *P = *I;
    P->VBInfo.GV = getAddrOfVBTable(P->VBInfo.ReusingBase, P->Path);
    VBTables.push_back(P->VBInfo);
  }
}

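/// Recursively walks the bases of CurSubobject.  Each subobject that has its
/// own vbptr yields a path whose vbtable is attributed to ReusingBase; a class
/// without its own vbptr passes the current ReusingBase down to its first
/// non-virtual base that has virtual bases.  Paths found through a base are
/// tagged with that base so rebucketPaths can disambiguate colliding names.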
void VBTableBuilder::findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
                                          BaseSubobject CurSubobject,
                                          VBTablePathVector &Paths) {
  size_t PathsStart = Paths.size();
  bool ReuseVBPtrFromBase = true;
  const CXXRecordDecl *CurBase = CurSubobject.getBase();
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(CurBase);

  // If this base has a vbptr, then we've found a path.  These are not full
  // paths, so we don't use CXXBasePath.
  if (Layout.hasOwnVBPtr()) {
    ReuseVBPtrFromBase = false;
    VBTablePath *Info = new VBTablePath(
        VBTableInfo(ReusingBase, CurSubobject, /*GV=*/0));
    Paths.push_back(Info);
  }

  // Recurse onto any bases which themselves have virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = CurBase->bases_begin(),
       E = CurBase->bases_end(); I != E; ++I) {
    const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
    if (!Base->getNumVBases())
      continue;  // Bases without virtual bases have no vbptrs.
    CharUnits NextOffset;
    const CXXRecordDecl *NextReusingBase = Base;
    if (I->isVirtual()) {
      if (!VBasesSeen.insert(Base))
        continue;  // Don't visit virtual bases twice.
      NextOffset = DerivedLayout.getVBaseClassOffset(Base);
    } else {
      NextOffset = (CurSubobject.getBaseOffset() +
                    Layout.getBaseClassOffset(Base));

      // If CurBase didn't have a vbptr, then ReusingBase will reuse the vbptr
      // of CurBase's first non-virtual base that has virtual bases.
      if (ReuseVBPtrFromBase) {
        NextReusingBase = ReusingBase;
        ReuseVBPtrFromBase = false;
      }
    }

    size_t NumPaths = Paths.size();
    findUnambiguousPaths(NextReusingBase, BaseSubobject(Base, NextOffset),
                         Paths);

    // Tag paths through this base with the base itself.  We might use it to
    // disambiguate.
    for (size_t I = NumPaths, E = Paths.size(); I != E; ++I)
      Paths[I]->NextBase = Base;
  }

  bool AmbiguousPaths = rebucketPaths(Paths, PathsStart);
  if (AmbiguousPaths)
    rebucketPaths(Paths, PathsStart, /*SecondPass=*/true);

#ifndef NDEBUG
  // Check that the paths are in fact unique.
  for (size_t I = PathsStart + 1, E = Paths.size(); I != E; ++I) {
    assert(Paths[I]->Path != Paths[I - 1]->Path &&
           "vbtable paths are not unique");
  }
#endif
}

static bool pathCompare(VBTablePath *LHS, VBTablePath *RHS) {
  return LHS->Path < RHS->Path;
}

void VBTableBuilder::extendPath(VBTablePath *P, bool SecondPass) {
  assert(P->NextBase || SecondPass);
  if (P->NextBase) {
    P->Path.push_back(P->NextBase);
    P->NextBase = 0;  // Prevent the path from being extended twice.
  }
}

bool VBTableBuilder::rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
                                   bool SecondPass) {
  // What we're essentially doing here is bucketing together ambiguous paths.
  // Any bucket with more than one path in it gets extended by NextBase, which
  // is usually the direct base that inherited the vbptr.  This code uses a
  // sorted vector to implement a multiset to form the buckets.  Note that the
  // ordering is based on pointers, but it doesn't change our output order.
  // The current algorithm is designed to match MSVC 2012's names.
  // TODO: Implement MSVC 2010 or earlier names to avoid extra vbtable cruft.
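  //
  // For example, with the diamond
  //
  //   struct A { int a; };
  //   struct B : virtual A { int b; };
  //   struct C : virtual A { int c; };
  //   struct D : B, C { int d; };
  //
  // D shares B's vbptr while C keeps its own, so two paths are found and both
  // start out empty.  They land in the same bucket, so each gets extended by
  // its NextBase (B and C respectively), which yields distinct symbols along
  // the lines of ??_8D@@7BB@@ and ??_8D@@7BC@@.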
  VBTablePathVector PathsSorted(&Paths[PathsStart], &Paths.back() + 1);
  std::sort(PathsSorted.begin(), PathsSorted.end(), pathCompare);
  bool AmbiguousPaths = false;
  for (size_t I = 0, E = PathsSorted.size(); I != E;) {
    // Scan forward to find the end of the bucket.
    size_t BucketStart = I;
    do {
      ++I;
    } while (I != E && PathsSorted[BucketStart]->Path == PathsSorted[I]->Path);

    // If this bucket has multiple paths, extend them all.
    if (I - BucketStart > 1) {
      AmbiguousPaths = true;
      for (size_t II = BucketStart; II != I; ++II)
        extendPath(PathsSorted[II], SecondPass);
    }
  }
  return AmbiguousPaths;
}

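/// Returns a declaration of the vbtable global for MostDerived along BasePath,
/// mangled according to the Microsoft scheme.  The array has one entry per
/// virtual base of ReusingBase plus one leading self-offset entry; its
/// initializer is filled in later by EmitVBTableDefinition.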
llvm::GlobalVariable *
VBTableBuilder::getAddrOfVBTable(const CXXRecordDecl *ReusingBase,
                                 ArrayRef<const CXXRecordDecl *> BasePath) {
  // Caching at this layer is redundant with the caching in EnumerateVBTables().

  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  MicrosoftMangleContext &Mangler =
      cast<MicrosoftMangleContext>(CGM.getCXXABI().getMangleContext());
  Mangler.mangleCXXVBTable(MostDerived, BasePath, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *VBTableType =
      llvm::ArrayType::get(CGM.IntTy, 1 + ReusingBase->getNumVBases());

  assert(!CGM.getModule().getNamedGlobal(Name) &&
         "vbtable with this name already exists: mangling bug?");
  llvm::GlobalVariable *VBTable =
      CGM.CreateOrReplaceCXXRuntimeVariable(Name, VBTableType,
                                            llvm::GlobalValue::ExternalLinkage);
  VBTable->setUnnamedAddr(true);
  return VBTable;
}

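/// Fills in the vbtable global created by getAddrOfVBTable.  Every entry is
/// measured from the vbptr of VBPtrSubobject within the complete object RD:
/// the leading entry points back to the start of the subobject holding the
/// vbptr, and the remaining entries give the displacement to each of
/// ReusingBase's virtual bases.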
void VBTableInfo::EmitVBTableDefinition(
    CodeGenModule &CGM, const CXXRecordDecl *RD,
    llvm::GlobalVariable::LinkageTypes Linkage) const {
  assert(RD->getNumVBases() && ReusingBase->getNumVBases() &&
         "should only emit vbtables for classes with vbtables");

  const ASTRecordLayout &BaseLayout =
      CGM.getContext().getASTRecordLayout(VBPtrSubobject.getBase());
  const ASTRecordLayout &DerivedLayout =
      CGM.getContext().getASTRecordLayout(RD);

  SmallVector<llvm::Constant *, 4> Offsets;

  // The offset from ReusingBase's vbptr to itself always leads.
  CharUnits VBPtrOffset = BaseLayout.getVBPtrOffset();
  Offsets.push_back(
      llvm::ConstantInt::get(CGM.IntTy, -VBPtrOffset.getQuantity()));

  // These are laid out in the same order as in Itanium, which is the same as
  // the order of the vbase iterator.
  for (CXXRecordDecl::base_class_const_iterator I = ReusingBase->vbases_begin(),
       E = ReusingBase->vbases_end(); I != E; ++I) {
    const CXXRecordDecl *VBase = I->getType()->getAsCXXRecordDecl();
    CharUnits Offset = DerivedLayout.getVBaseClassOffset(VBase);
    assert(!Offset.isNegative());
    // Make it relative to the subobject vbptr.
    Offset -= VBPtrSubobject.getBaseOffset() + VBPtrOffset;
    Offsets.push_back(llvm::ConstantInt::get(CGM.IntTy, Offset.getQuantity()));
  }

  assert(Offsets.size() ==
         cast<llvm::ArrayType>(cast<llvm::PointerType>(GV->getType())
                               ->getElementType())->getNumElements());
  llvm::ArrayType *VBTableType =
      llvm::ArrayType::get(CGM.IntTy, Offsets.size());
  llvm::Constant *Init = llvm::ConstantArray::get(VBTableType, Offsets);
  GV->setInitializer(Init);

  // Set the correct linkage.
  GV->setLinkage(Linkage);

  // Set the right visibility.
  CGM.setTypeVisibility(GV, RD, CodeGenModule::TVK_ForVTable);
}

} // namespace CodeGen
} // namespace clang