//===--- CGRecordLayoutBuilder.cpp - CGRecordLayout builder ----*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Builder implementation for CGRecordLayout objects.
//
//===----------------------------------------------------------------------===//

#include "CGRecordLayout.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/RecordLayout.h"
#include "CodeGenTypes.h"
#include "CGCXXABI.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Type.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
using namespace clang;
using namespace CodeGen;

namespace {

class CGRecordLayoutBuilder {
public:
  /// FieldTypes - Holds the LLVM types that the struct is created from.
  ///
  std::vector<const llvm::Type *> FieldTypes;

  /// BaseSubobjectType - Holds the LLVM type for the non-virtual part
  /// of the struct. For example, consider:
  ///
  ///   struct A { int i; };
  ///   struct B { void *v; };
  ///   struct C : virtual A, B { };
  ///
  /// The LLVM type of C will be
  /// %struct.C = type { i32 (...)**, %struct.A, i32, %struct.B }
  ///
  /// And the LLVM type of the non-virtual base struct will be
  /// %struct.C.base = type { i32 (...)**, %struct.A, i32 }
  ///
  /// This only gets initialized if the base subobject type is
  /// different from the complete-object type.
  const llvm::StructType *BaseSubobjectType;

  /// Fields - Maps each FieldDecl to its corresponding LLVM field number.
  llvm::DenseMap<const FieldDecl *, unsigned> Fields;

  /// BitFields - Holds location and size information for each bit field.
  llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;

  llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;

  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
  /// primary base classes for some other direct or indirect base class.
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;

  /// LaidOutVirtualBases - A set of all laid out virtual bases, used to avoid
  /// laying out virtual bases more than once.
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> LaidOutVirtualBases;

  /// IsZeroInitializable - Whether this struct can be C++
  /// zero-initialized with an LLVM zeroinitializer.
  bool IsZeroInitializable;
  bool IsZeroInitializableAsBase;

  /// Packed - Whether the resulting LLVM struct will be packed or not.
  bool Packed;

private:
  CodeGenTypes &Types;

  /// Alignment - Contains the alignment of the RecordDecl.
  //
  // FIXME: This is not needed and should be removed.
  unsigned Alignment;

  /// BitsAvailableInLastField - If a bit field spans only part of an LLVM
  /// field, this will have the number of bits still available in the field.
  char BitsAvailableInLastField;

  /// NextFieldOffsetInBytes - Holds the next field offset in bytes.
  uint64_t NextFieldOffsetInBytes;

  /// LayoutUnionField - Will layout a field in a union and return the type
  /// that the field will have.
  const llvm::Type *LayoutUnionField(const FieldDecl *Field,
                                     const ASTRecordLayout &Layout);

  /// LayoutUnion - Will layout a union RecordDecl.
  void LayoutUnion(const RecordDecl *D);

  /// LayoutFields - try to layout all fields in the record decl.
  /// Returns false if the operation failed because the struct is not packed.
  bool LayoutFields(const RecordDecl *D);

  /// LayoutBase - layout a single base, virtual or non-virtual.
  void LayoutBase(const CXXRecordDecl *BaseDecl, uint64_t BaseOffset);

  /// LayoutVirtualBase - layout a single virtual base.
  void LayoutVirtualBase(const CXXRecordDecl *BaseDecl, uint64_t BaseOffset);

  /// LayoutVirtualBases - layout the virtual bases of a record decl.
  void LayoutVirtualBases(const CXXRecordDecl *RD,
                          const ASTRecordLayout &Layout);

  /// LayoutNonVirtualBase - layout a single non-virtual base.
  void LayoutNonVirtualBase(const CXXRecordDecl *BaseDecl,
                            uint64_t BaseOffset);

  /// LayoutNonVirtualBases - layout the non-virtual bases of a record decl.
  void LayoutNonVirtualBases(const CXXRecordDecl *RD,
                             const ASTRecordLayout &Layout);

  /// ComputeNonVirtualBaseType - Compute the non-virtual base field types.
  bool ComputeNonVirtualBaseType(const CXXRecordDecl *RD);

  /// LayoutField - layout a single field. Returns false if the operation failed
  /// because the current struct is not packed.
  bool LayoutField(const FieldDecl *D, uint64_t FieldOffset);

  /// LayoutBitField - layout a single bit field.
  void LayoutBitField(const FieldDecl *D, uint64_t FieldOffset);

  /// AppendField - Appends a field with the given offset and type.
  void AppendField(uint64_t FieldOffsetInBytes, const llvm::Type *FieldTy);

  /// AppendPadding - Appends enough padding bytes so that the total
  /// struct size is a multiple of the field alignment.
  void AppendPadding(uint64_t FieldOffsetInBytes,
                     unsigned FieldAlignmentInBytes);

  /// getByteArrayType - Returns a byte array type with the given number of
  /// elements.
  const llvm::Type *getByteArrayType(uint64_t NumBytes);

  /// AppendBytes - Append a given number of bytes to the record.
  void AppendBytes(uint64_t NumBytes);
  void AppendBytes(CharUnits numBytes) { AppendBytes(numBytes.getQuantity()); }

  /// AppendTailPadding - Append enough tail padding so that the type will have
  /// the passed size.
  void AppendTailPadding(uint64_t RecordSize);

  unsigned getTypeAlignment(const llvm::Type *Ty) const;

  /// getAlignmentAsLLVMStruct - Returns the maximum alignment of all the
  /// LLVM element types.
  unsigned getAlignmentAsLLVMStruct() const;

  /// CheckZeroInitializable - Check if the given type contains a pointer
  /// to a data member.
  void CheckZeroInitializable(QualType T);

public:
  CGRecordLayoutBuilder(CodeGenTypes &Types)
    : BaseSubobjectType(0),
      IsZeroInitializable(true), IsZeroInitializableAsBase(true),
      Packed(false), Types(Types), Alignment(0),
      BitsAvailableInLastField(0), NextFieldOffsetInBytes(0) { }

  /// Layout - Will layout a RecordDecl.
  void Layout(const RecordDecl *D);
};

}

void CGRecordLayoutBuilder::Layout(const RecordDecl *D) {
  Alignment =
    Types.getContext().getASTRecordLayout(D).getAlignment().getQuantity();
  Packed = D->hasAttr<PackedAttr>();

  if (D->isUnion()) {
    LayoutUnion(D);
    return;
  }

  if (LayoutFields(D))
    return;

  // We weren't able to layout the struct. Try again with a packed struct.
  Packed = true;
  NextFieldOffsetInBytes = 0;
  FieldTypes.clear();
  Fields.clear();
  BitFields.clear();
  NonVirtualBases.clear();
  VirtualBases.clear();

  LayoutFields(D);
}

CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD,
                                        uint64_t FieldOffset,
                                        uint64_t FieldSize,
                                        uint64_t ContainingTypeSizeInBits,
                                        unsigned ContainingTypeAlign) {
  const llvm::Type *Ty = Types.ConvertTypeForMemRecursive(FD->getType());
  uint64_t TypeSizeInBytes = Types.getTargetData().getTypeAllocSize(Ty);
  uint64_t TypeSizeInBits = TypeSizeInBytes * 8;

  bool IsSigned = FD->getType()->isSignedIntegerType();

  if (FieldSize > TypeSizeInBits) {
    // We have a wide bit-field. The extra bits are only used for padding, so
    // if we have a bitfield of type T, with size N:
    //
    //   T t : N;
    //
    // We can just assume that it's:
    //
    //   T t : sizeof(T);
    //
    FieldSize = TypeSizeInBits;
  }

  // Compute the access components. The policy we use is to start by attempting
  // to access using the width of the bit-field type itself and to always access
  // at aligned indices of that type. If such an access would fail because it
  // extends past the bound of the type, then we reduce size to the next smaller
  // power of two and retry. The current algorithm assumes pow2 sized types,
  // although this is easy to fix.
  //
  // FIXME: This algorithm is wrong on big-endian systems, I think.
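  //
  // As a rough illustration (the numbers are chosen here, not taken from the
  // original comments): a 9-bit field of type 'int' starting at bit offset 30
  // of a 64-bit record is read with a 32-bit access at byte 0 that covers
  // target bits [30, 32), followed by a 32-bit access at byte 4 that covers
  // target bits [32, 39), giving two access components in total.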
  assert(llvm::isPowerOf2_32(TypeSizeInBits) && "Unexpected type size!");
  CGBitFieldInfo::AccessInfo Components[3];
  unsigned NumComponents = 0;
  unsigned AccessedTargetBits = 0;       // The number of target bits accessed.
  unsigned AccessWidth = TypeSizeInBits; // The current access width to attempt.

  // Round down from the field offset to find the first access position that is
  // at an aligned offset of the initial access type.
  uint64_t AccessStart = FieldOffset - (FieldOffset % AccessWidth);

  // Adjust initial access size to fit within record.
  while (AccessWidth > 8 &&
         AccessStart + AccessWidth > ContainingTypeSizeInBits) {
    AccessWidth >>= 1;
    AccessStart = FieldOffset - (FieldOffset % AccessWidth);
  }

  while (AccessedTargetBits < FieldSize) {
    // Check that we can access using a type of this size, without reading off
    // the end of the structure. This can occur with packed structures and
    // -fno-bitfield-type-align, for example.
    if (AccessStart + AccessWidth > ContainingTypeSizeInBits) {
      // If so, reduce access size to the next smaller power-of-two and retry.
      AccessWidth >>= 1;
      assert(AccessWidth >= 8 && "Cannot access under byte size!");
      continue;
    }

    // Otherwise, add an access component.

    // First, compute the bits inside this access which are part of the
    // target. We are reading bits [AccessStart, AccessStart + AccessWidth); the
    // intersection with [FieldOffset, FieldOffset + FieldSize) gives the bits
    // in the target that we are reading.
    assert(FieldOffset < AccessStart + AccessWidth && "Invalid access start!");
    assert(AccessStart < FieldOffset + FieldSize && "Invalid access start!");
    uint64_t AccessBitsInFieldStart = std::max(AccessStart, FieldOffset);
    uint64_t AccessBitsInFieldSize =
      std::min(AccessWidth + AccessStart,
               FieldOffset + FieldSize) - AccessBitsInFieldStart;

    assert(NumComponents < 3 && "Unexpected number of components!");
    CGBitFieldInfo::AccessInfo &AI = Components[NumComponents++];
    AI.FieldIndex = 0;
    // FIXME: We still follow the old access pattern of only using the field
    // byte offset. We should switch this once we fix the struct layout to be
    // pretty.
    AI.FieldByteOffset = AccessStart / 8;
    AI.FieldBitStart = AccessBitsInFieldStart - AccessStart;
    AI.AccessWidth = AccessWidth;
    AI.AccessAlignment = llvm::MinAlign(ContainingTypeAlign, AccessStart) / 8;
    AI.TargetBitOffset = AccessedTargetBits;
    AI.TargetBitWidth = AccessBitsInFieldSize;

    AccessStart += AccessWidth;
    AccessedTargetBits += AI.TargetBitWidth;
  }

  assert(AccessedTargetBits == FieldSize && "Invalid bit-field access!");
  return CGBitFieldInfo(FieldSize, NumComponents, Components, IsSigned);
}

CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD,
                                        uint64_t FieldOffset,
                                        uint64_t FieldSize) {
  const RecordDecl *RD = FD->getParent();
  const ASTRecordLayout &RL = Types.getContext().getASTRecordLayout(RD);
  uint64_t ContainingTypeSizeInBits = Types.getContext().toBits(RL.getSize());
  unsigned ContainingTypeAlign = Types.getContext().toBits(RL.getAlignment());

  return MakeInfo(Types, FD, FieldOffset, FieldSize, ContainingTypeSizeInBits,
                  ContainingTypeAlign);
}

void CGRecordLayoutBuilder::LayoutBitField(const FieldDecl *D,
                                           uint64_t FieldOffset) {
  uint64_t FieldSize =
    D->getBitWidth()->EvaluateAsInt(Types.getContext()).getZExtValue();

  if (FieldSize == 0)
    return;

  uint64_t NextFieldOffset = NextFieldOffsetInBytes * 8;
  unsigned NumBytesToAppend;

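  // Two cases: either the new bit-field starts inside leftover bits at the
  // end of the previous LLVM field (and we only need to append storage for
  // the overflow, if any), or it starts at or past the current end of the
  // struct (and we pad out to its byte and append storage for all its bits).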
  if (FieldOffset < NextFieldOffset) {
    assert(BitsAvailableInLastField && "Bitfield size mismatch!");
    assert(NextFieldOffsetInBytes && "Must have laid out at least one byte!");

    // The bitfield begins in the previous bit-field.
    NumBytesToAppend =
      llvm::RoundUpToAlignment(FieldSize - BitsAvailableInLastField, 8) / 8;
  } else {
    assert(FieldOffset % 8 == 0 && "Field offset not aligned correctly");

    // Append padding if necessary.
    AppendPadding(FieldOffset / 8, 1);

    NumBytesToAppend =
      llvm::RoundUpToAlignment(FieldSize, 8) / 8;

    assert(NumBytesToAppend && "No bytes to append!");
  }

  // Add the bit field info.
  BitFields.insert(std::make_pair(D,
                   CGBitFieldInfo::MakeInfo(Types, D, FieldOffset, FieldSize)));

  AppendBytes(NumBytesToAppend);

  BitsAvailableInLastField =
    NextFieldOffsetInBytes * 8 - (FieldOffset + FieldSize);
}

bool CGRecordLayoutBuilder::LayoutField(const FieldDecl *D,
                                        uint64_t FieldOffset) {
  // If the field is packed, then we need a packed struct.
  if (!Packed && D->hasAttr<PackedAttr>())
    return false;

  if (D->isBitField()) {
    // We must use packed structs for unnamed bit fields since they
    // don't affect the struct alignment.
    if (!Packed && !D->getDeclName())
      return false;

    LayoutBitField(D, FieldOffset);
    return true;
  }

  CheckZeroInitializable(D->getType());

  assert(FieldOffset % 8 == 0 && "FieldOffset is not on a byte boundary!");
  uint64_t FieldOffsetInBytes = FieldOffset / 8;

  const llvm::Type *Ty = Types.ConvertTypeForMemRecursive(D->getType());
  unsigned TypeAlignment = getTypeAlignment(Ty);

  // If the type alignment is larger than the struct alignment, we must use
  // a packed struct.
  if (TypeAlignment > Alignment) {
    assert(!Packed && "Alignment is wrong even with packed struct!");
    return false;
  }

  if (const RecordType *RT = D->getType()->getAs<RecordType>()) {
    const RecordDecl *RD = cast<RecordDecl>(RT->getDecl());
    if (const MaxFieldAlignmentAttr *MFAA =
          RD->getAttr<MaxFieldAlignmentAttr>()) {
      if (MFAA->getAlignment() != TypeAlignment * 8 && !Packed)
        return false;
    }
  }

  // Round up the field offset to the alignment of the field type.
  uint64_t AlignedNextFieldOffsetInBytes =
    llvm::RoundUpToAlignment(NextFieldOffsetInBytes, TypeAlignment);

  if (FieldOffsetInBytes < AlignedNextFieldOffsetInBytes) {
    assert(!Packed && "Could not place field even with packed struct!");
    return false;
  }

  AppendPadding(FieldOffsetInBytes, TypeAlignment);

  // Now append the field.
  Fields[D] = FieldTypes.size();
  AppendField(FieldOffsetInBytes, Ty);

  return true;
}

const llvm::Type *
CGRecordLayoutBuilder::LayoutUnionField(const FieldDecl *Field,
                                        const ASTRecordLayout &Layout) {
  if (Field->isBitField()) {
    uint64_t FieldSize =
      Field->getBitWidth()->EvaluateAsInt(Types.getContext()).getZExtValue();

    // Ignore zero sized bit fields.
    if (FieldSize == 0)
      return 0;

    const llvm::Type *FieldTy = llvm::Type::getInt8Ty(Types.getLLVMContext());
    unsigned NumBytesToAppend =
      llvm::RoundUpToAlignment(FieldSize, 8) / 8;

    if (NumBytesToAppend > 1)
      FieldTy = llvm::ArrayType::get(FieldTy, NumBytesToAppend);

    // Add the bit field info.
    BitFields.insert(std::make_pair(Field,
                     CGBitFieldInfo::MakeInfo(Types, Field, 0, FieldSize)));
    return FieldTy;
  }

  // This is a regular union field.
  Fields[Field] = 0;
  return Types.ConvertTypeForMemRecursive(Field->getType());
}

void CGRecordLayoutBuilder::LayoutUnion(const RecordDecl *D) {
  assert(D->isUnion() && "Can't call LayoutUnion on a non-union record!");

  const ASTRecordLayout &Layout = Types.getContext().getASTRecordLayout(D);

  const llvm::Type *Ty = 0;
  uint64_t Size = 0;
  unsigned Align = 0;

  bool HasOnlyZeroSizedBitFields = true;

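  // Walk the members and pick a single "dominant" field to represent the
  // union in IR: the one with the strictest alignment, with ties broken in
  // favor of the larger size. LayoutUnionField maps every non-bit-field
  // member to LLVM field index 0, and accesses to the non-dominant members
  // are expected to go through pointer casts elsewhere in IR generation.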
  unsigned FieldNo = 0;
  for (RecordDecl::field_iterator Field = D->field_begin(),
       FieldEnd = D->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
    assert(Layout.getFieldOffset(FieldNo) == 0 &&
           "Union field offset did not start at the beginning of record!");
    const llvm::Type *FieldTy = LayoutUnionField(*Field, Layout);

    if (!FieldTy)
      continue;

    HasOnlyZeroSizedBitFields = false;

    unsigned FieldAlign = Types.getTargetData().getABITypeAlignment(FieldTy);
    uint64_t FieldSize = Types.getTargetData().getTypeAllocSize(FieldTy);

    if (FieldAlign < Align)
      continue;

    if (FieldAlign > Align || FieldSize > Size) {
      Ty = FieldTy;
      Align = FieldAlign;
      Size = FieldSize;
    }
  }

  // Now add our field.
  if (Ty) {
    AppendField(0, Ty);

    if (getTypeAlignment(Ty) > Layout.getAlignment().getQuantity()) {
      // We need a packed struct.
      Packed = true;
      Align = 1;
    }
  }
  if (!Align) {
    assert(HasOnlyZeroSizedBitFields &&
           "0-align record did not have all zero-sized bit-fields!");
    Align = 1;
  }

  // Append tail padding.
  uint64_t RecordSize = Layout.getSize().getQuantity();
  if (RecordSize > Size)
    AppendPadding(RecordSize, Align);
}

void CGRecordLayoutBuilder::LayoutBase(const CXXRecordDecl *base,
                                       uint64_t baseOffsetInBits) {
  uint64_t baseOffsetInBytes = baseOffsetInBits / 8;
  AppendPadding(baseOffsetInBytes, 1);

  const ASTRecordLayout &baseASTLayout
    = Types.getContext().getASTRecordLayout(base);

  // FIXME: use a better type than [sizeof(base) x i8].
  // We could use the base layout's subobject type as the actual
  // subobject type in the layout if its size is the nvsize of the
  // base, or if we'd need padding out to the enclosing object anyhow.
  AppendBytes(baseASTLayout.getNonVirtualSize());
}

void CGRecordLayoutBuilder::LayoutNonVirtualBase(const CXXRecordDecl *base,
                                                 uint64_t baseOffsetInBits) {
  // Ignore empty bases.
  if (base->isEmpty()) return;

  const CGRecordLayout &baseLayout = Types.getCGRecordLayout(base);
  if (IsZeroInitializableAsBase) {
    assert(IsZeroInitializable &&
           "class zero-initializable as base but not as complete object");

    IsZeroInitializable = IsZeroInitializableAsBase =
      baseLayout.isZeroInitializableAsBase();
  }

  LayoutBase(base, baseOffsetInBits);
  NonVirtualBases[base] = (FieldTypes.size() - 1);
}

void
CGRecordLayoutBuilder::LayoutVirtualBase(const CXXRecordDecl *base,
                                         uint64_t baseOffsetInBits) {
  // Ignore empty bases.
  if (base->isEmpty()) return;

  const CGRecordLayout &baseLayout = Types.getCGRecordLayout(base);
  if (IsZeroInitializable)
    IsZeroInitializable = baseLayout.isZeroInitializableAsBase();

  LayoutBase(base, baseOffsetInBits);
  VirtualBases[base] = (FieldTypes.size() - 1);
}

/// LayoutVirtualBases - layout the virtual bases of a record decl.
void
CGRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
                                          const ASTRecordLayout &Layout) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // We only want to lay out virtual bases that aren't indirect primary bases
    // of some other base.
    if (I->isVirtual() && !IndirectPrimaryBases.count(BaseDecl)) {
      // Only lay out the base once.
      if (!LaidOutVirtualBases.insert(BaseDecl))
        continue;

      uint64_t VBaseOffset = Layout.getVBaseClassOffsetInBits(BaseDecl);
      LayoutVirtualBase(BaseDecl, VBaseOffset);
    }

    if (!BaseDecl->getNumVBases()) {
      // This base isn't interesting since it doesn't have any virtual bases.
      continue;
    }

    LayoutVirtualBases(BaseDecl, Layout);
  }
}

void
CGRecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD,
                                             const ASTRecordLayout &Layout) {
  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();

  // Check if we need to add a vtable pointer.
  if (RD->isDynamicClass()) {
    if (!PrimaryBase) {
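      // No primary base to share a vptr with, so add our own vtable pointer
      // as the very first field. It is modeled as 'i32 (...)**', a pointer
      // into a table of generic vararg function pointers, matching the
      // example in the class comment above.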
581 const llvm::Type *FunctionType =
582 llvm::FunctionType::get(llvm::Type::getInt32Ty(Types.getLLVMContext()),
583 /*isVarArg=*/true);
584 const llvm::Type *VTableTy = FunctionType->getPointerTo();
585
586 assert(NextFieldOffsetInBytes == 0 &&
587 "VTable pointer must come first!");
588 AppendField(NextFieldOffsetInBytes, VTableTy->getPointerTo());
589 } else {
Anders Carlsson7f95cd12010-11-24 23:12:57 +0000590 if (!Layout.isPrimaryBaseVirtual())
Anders Carlssonaf9e5af2010-05-18 05:12:20 +0000591 LayoutNonVirtualBase(PrimaryBase, 0);
Anders Carlsson1f95ee32010-11-25 01:59:35 +0000592 else
593 LayoutVirtualBase(PrimaryBase, 0);
Anders Carlssonaf9e5af2010-05-18 05:12:20 +0000594 }
595 }
596
597 // Layout the non-virtual bases.
598 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
599 E = RD->bases_end(); I != E; ++I) {
600 if (I->isVirtual())
601 continue;
602
603 const CXXRecordDecl *BaseDecl =
604 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
605
606 // We've already laid out the primary base.
Anders Carlsson7f95cd12010-11-24 23:12:57 +0000607 if (BaseDecl == PrimaryBase && !Layout.isPrimaryBaseVirtual())
Anders Carlssonaf9e5af2010-05-18 05:12:20 +0000608 continue;
609
Anders Carlssonfd88a612010-10-31 23:22:37 +0000610 LayoutNonVirtualBase(BaseDecl, Layout.getBaseClassOffsetInBits(BaseDecl));
Anders Carlssond681a292009-12-16 17:27:20 +0000611 }
612}
613
Argyrios Kyrtzidis648fcbe2010-12-10 00:11:00 +0000614bool
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000615CGRecordLayoutBuilder::ComputeNonVirtualBaseType(const CXXRecordDecl *RD) {
616 const ASTRecordLayout &Layout = Types.getContext().getASTRecordLayout(RD);
617
Ken Dyckbec02852011-02-08 02:02:47 +0000618 CharUnits NonVirtualSize = Layout.getNonVirtualSize();
619 CharUnits NonVirtualAlign = Layout.getNonVirtualAlign();
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000620 uint64_t AlignedNonVirtualTypeSize =
Ken Dyckbec02852011-02-08 02:02:47 +0000621 NonVirtualSize.RoundUpToAlignment(NonVirtualAlign).getQuantity();
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000622
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000623 // First check if we can use the same fields as for the complete class.
Ken Dyck89d9f362011-02-10 12:36:29 +0000624 uint64_t RecordSize = Layout.getSize().getQuantity();
John McCall0217dfc22011-02-15 06:40:56 +0000625 if (AlignedNonVirtualTypeSize == RecordSize)
Argyrios Kyrtzidis648fcbe2010-12-10 00:11:00 +0000626 return true;
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000627
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000628 // Check if we need padding.
629 uint64_t AlignedNextFieldOffset =
Anders Carlssonacf877b2010-11-28 23:06:23 +0000630 llvm::RoundUpToAlignment(NextFieldOffsetInBytes,
631 getAlignmentAsLLVMStruct());
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000632
John McCall0217dfc22011-02-15 06:40:56 +0000633 if (AlignedNextFieldOffset > AlignedNonVirtualTypeSize) {
634 assert(!Packed && "cannot layout even as packed struct");
Argyrios Kyrtzidis648fcbe2010-12-10 00:11:00 +0000635 return false; // Needs packing.
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000636 }
637
John McCall0217dfc22011-02-15 06:40:56 +0000638 bool needsPadding = (AlignedNonVirtualTypeSize != AlignedNextFieldOffset);
639 if (needsPadding) {
640 uint64_t NumBytes = AlignedNonVirtualTypeSize - AlignedNextFieldOffset;
641 FieldTypes.push_back(getByteArrayType(NumBytes));
642 }
643
644 BaseSubobjectType = llvm::StructType::get(Types.getLLVMContext(),
645 FieldTypes, Packed);
646
647 if (needsPadding) {
648 // Pull the padding back off.
649 FieldTypes.pop_back();
650 }
651
Argyrios Kyrtzidis648fcbe2010-12-10 00:11:00 +0000652 return true;
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000653}
654
Anders Carlsson307846f2009-07-23 03:17:50 +0000655bool CGRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
656 assert(!D->isUnion() && "Can't call LayoutFields on a union!");
Anders Carlsson28a5fa22009-08-08 19:38:24 +0000657 assert(Alignment && "Did not set alignment!");
Mike Stump11289f42009-09-09 15:08:12 +0000658
Anders Carlsson697f6592009-07-23 03:43:54 +0000659 const ASTRecordLayout &Layout = Types.getContext().getASTRecordLayout(D);
Mike Stump11289f42009-09-09 15:08:12 +0000660
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000661 const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D);
662 if (RD)
Anders Carlssonaf9e5af2010-05-18 05:12:20 +0000663 LayoutNonVirtualBases(RD, Layout);
Daniel Dunbar23ee4b72010-03-31 00:11:27 +0000664
Anders Carlsson307846f2009-07-23 03:17:50 +0000665 unsigned FieldNo = 0;
Fariborz Jahanian7b2b1ec2009-07-27 20:57:45 +0000666
Mike Stump11289f42009-09-09 15:08:12 +0000667 for (RecordDecl::field_iterator Field = D->field_begin(),
Anders Carlsson307846f2009-07-23 03:17:50 +0000668 FieldEnd = D->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
669 if (!LayoutField(*Field, Layout.getFieldOffset(FieldNo))) {
Mike Stump11289f42009-09-09 15:08:12 +0000670 assert(!Packed &&
Anders Carlsson307846f2009-07-23 03:17:50 +0000671 "Could not layout fields even with a packed LLVM struct!");
672 return false;
673 }
674 }
675
Anders Carlsson1f95ee32010-11-25 01:59:35 +0000676 if (RD) {
Anders Carlssona459adb2010-11-28 19:18:44 +0000677 // We've laid out the non-virtual bases and the fields, now compute the
678 // non-virtual base field types.
Argyrios Kyrtzidis648fcbe2010-12-10 00:11:00 +0000679 if (!ComputeNonVirtualBaseType(RD)) {
680 assert(!Packed && "Could not layout even with a packed LLVM struct!");
681 return false;
682 }
Anders Carlsson1f95ee32010-11-25 01:59:35 +0000683
Anders Carlssona459adb2010-11-28 19:18:44 +0000684 // And lay out the virtual bases.
685 RD->getIndirectPrimaryBases(IndirectPrimaryBases);
686 if (Layout.isPrimaryBaseVirtual())
687 IndirectPrimaryBases.insert(Layout.getPrimaryBase());
688 LayoutVirtualBases(RD, Layout);
689 }
Anders Carlssonc1351ca2010-11-09 05:25:47 +0000690
Anders Carlsson307846f2009-07-23 03:17:50 +0000691 // Append tail padding if necessary.
Ken Dyckb0fcc592011-02-11 01:54:29 +0000692 AppendTailPadding(Types.getContext().toBits(Layout.getSize()));
Mike Stump11289f42009-09-09 15:08:12 +0000693
Anders Carlsson307846f2009-07-23 03:17:50 +0000694 return true;
695}
696
void CGRecordLayoutBuilder::AppendTailPadding(uint64_t RecordSize) {
  assert(RecordSize % 8 == 0 && "Invalid record size!");

  uint64_t RecordSizeInBytes = RecordSize / 8;
  assert(NextFieldOffsetInBytes <= RecordSizeInBytes && "Size mismatch!");

  uint64_t AlignedNextFieldOffset =
    llvm::RoundUpToAlignment(NextFieldOffsetInBytes,
                             getAlignmentAsLLVMStruct());

  if (AlignedNextFieldOffset == RecordSizeInBytes) {
    // We don't need any padding.
    return;
  }

  unsigned NumPadBytes = RecordSizeInBytes - NextFieldOffsetInBytes;
  AppendBytes(NumPadBytes);
}

void CGRecordLayoutBuilder::AppendField(uint64_t FieldOffsetInBytes,
                                        const llvm::Type *FieldTy) {
  uint64_t FieldSizeInBytes = Types.getTargetData().getTypeAllocSize(FieldTy);

  FieldTypes.push_back(FieldTy);

  NextFieldOffsetInBytes = FieldOffsetInBytes + FieldSizeInBytes;
  BitsAvailableInLastField = 0;
}

void CGRecordLayoutBuilder::AppendPadding(uint64_t FieldOffsetInBytes,
                                          unsigned FieldAlignmentInBytes) {
  assert(NextFieldOffsetInBytes <= FieldOffsetInBytes &&
         "Incorrect field layout!");

  // Round up the field offset to the alignment of the field type.
  uint64_t AlignedNextFieldOffsetInBytes =
    llvm::RoundUpToAlignment(NextFieldOffsetInBytes, FieldAlignmentInBytes);

  if (AlignedNextFieldOffsetInBytes < FieldOffsetInBytes) {
    // Even with alignment, the field offset is not at the right place,
    // insert padding.
    uint64_t PaddingInBytes = FieldOffsetInBytes - NextFieldOffsetInBytes;

    AppendBytes(PaddingInBytes);
  }
}

const llvm::Type *CGRecordLayoutBuilder::getByteArrayType(uint64_t NumBytes) {
  assert(NumBytes != 0 && "Empty byte arrays aren't allowed.");

  const llvm::Type *Ty = llvm::Type::getInt8Ty(Types.getLLVMContext());
  if (NumBytes > 1)
    Ty = llvm::ArrayType::get(Ty, NumBytes);

  return Ty;
}

void CGRecordLayoutBuilder::AppendBytes(uint64_t NumBytes) {
  if (NumBytes == 0)
    return;

  // Append the padding field.
  AppendField(NextFieldOffsetInBytes, getByteArrayType(NumBytes));
}

unsigned CGRecordLayoutBuilder::getTypeAlignment(const llvm::Type *Ty) const {
  if (Packed)
    return 1;

  return Types.getTargetData().getABITypeAlignment(Ty);
}

unsigned CGRecordLayoutBuilder::getAlignmentAsLLVMStruct() const {
  if (Packed)
    return 1;

  unsigned MaxAlignment = 1;
  for (size_t i = 0; i != FieldTypes.size(); ++i)
    MaxAlignment = std::max(MaxAlignment, getTypeAlignment(FieldTypes[i]));

  return MaxAlignment;
}

/// Merge in whether a field of the given type is zero-initializable.
void CGRecordLayoutBuilder::CheckZeroInitializable(QualType T) {
  // If we've already decided this record can't be zero-initialized, there is
  // nothing left to check.
  if (!IsZeroInitializableAsBase)
    return;

  // Can only have member pointers if we're compiling C++.
  if (!Types.getContext().getLangOptions().CPlusPlus)
    return;

  const Type *elementType = T->getBaseElementTypeUnsafe();

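  // Member pointers are the interesting case: under the Itanium C++ ABI a
  // null pointer to a data member is represented as -1 rather than 0, so a
  // record containing one cannot be zero-initialized with a plain
  // zeroinitializer. The target's C++ ABI object makes the final call below.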
  if (const MemberPointerType *MPT = elementType->getAs<MemberPointerType>()) {
    if (!Types.getCXXABI().isZeroInitializable(MPT))
      IsZeroInitializable = IsZeroInitializableAsBase = false;
  } else if (const RecordType *RT = elementType->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    const CGRecordLayout &Layout = Types.getCGRecordLayout(RD);
    if (!Layout.isZeroInitializable())
      IsZeroInitializable = IsZeroInitializableAsBase = false;
  }
}

CGRecordLayout *CodeGenTypes::ComputeRecordLayout(const RecordDecl *D) {
  CGRecordLayoutBuilder Builder(*this);

  Builder.Layout(D);

  const llvm::StructType *Ty = llvm::StructType::get(getLLVMContext(),
                                                     Builder.FieldTypes,
                                                     Builder.Packed);

  // If we're in C++, compute the base subobject type.
  const llvm::StructType *BaseTy = 0;
  if (isa<CXXRecordDecl>(D)) {
    BaseTy = Builder.BaseSubobjectType;
    if (!BaseTy) BaseTy = Ty;
  }

  CGRecordLayout *RL =
    new CGRecordLayout(Ty, BaseTy, Builder.IsZeroInitializable,
                       Builder.IsZeroInitializableAsBase);

  RL->NonVirtualBases.swap(Builder.NonVirtualBases);
  RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases);

  // Add all the field numbers.
  RL->FieldInfo.swap(Builder.Fields);

  // Add bitfield info.
  RL->BitFields.swap(Builder.BitFields);

  // Dump the layout, if requested.
  if (getContext().getLangOptions().DumpRecordLayouts) {
    llvm::errs() << "\n*** Dumping IRgen Record Layout\n";
    llvm::errs() << "Record: ";
    D->dump();
    llvm::errs() << "\nLayout: ";
    RL->dump();
  }

#ifndef NDEBUG
  // Verify that the computed LLVM struct size matches the AST layout size.
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(D);

  uint64_t TypeSizeInBits = getContext().toBits(Layout.getSize());
  assert(TypeSizeInBits == getTargetData().getTypeAllocSizeInBits(Ty) &&
         "Type size mismatch!");

  if (BaseTy) {
    CharUnits NonVirtualSize = Layout.getNonVirtualSize();
    CharUnits NonVirtualAlign = Layout.getNonVirtualAlign();
    CharUnits AlignedNonVirtualTypeSize =
      NonVirtualSize.RoundUpToAlignment(NonVirtualAlign);

    uint64_t AlignedNonVirtualTypeSizeInBits =
      getContext().toBits(AlignedNonVirtualTypeSize);

    assert(AlignedNonVirtualTypeSizeInBits ==
           getTargetData().getTypeAllocSizeInBits(BaseTy) &&
           "Type size mismatch!");
  }

  // Verify that the LLVM and AST field offsets agree.
  const llvm::StructType *ST =
    dyn_cast<llvm::StructType>(RL->getLLVMType());
  const llvm::StructLayout *SL = getTargetData().getStructLayout(ST);

  const ASTRecordLayout &AST_RL = getContext().getASTRecordLayout(D);
  RecordDecl::field_iterator it = D->field_begin();
  for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) {
    const FieldDecl *FD = *it;

    // For non-bit-fields, just check that the LLVM struct offset matches the
    // AST offset.
    if (!FD->isBitField()) {
      unsigned FieldNo = RL->getLLVMFieldNo(FD);
      assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) &&
             "Invalid field offset!");
      continue;
    }

    // Ignore unnamed bit-fields.
    if (!FD->getDeclName())
      continue;

    const CGBitFieldInfo &Info = RL->getBitFieldInfo(FD);
    for (unsigned i = 0, e = Info.getNumComponents(); i != e; ++i) {
      const CGBitFieldInfo::AccessInfo &AI = Info.getComponent(i);

      // Verify that every component access is within the structure.
      uint64_t FieldOffset = SL->getElementOffsetInBits(AI.FieldIndex);
      uint64_t AccessBitOffset = FieldOffset + AI.FieldByteOffset * 8;
      assert(AccessBitOffset + AI.AccessWidth <= TypeSizeInBits &&
             "Invalid bit-field access (out of range)!");
    }
  }
#endif

  return RL;
}

void CGRecordLayout::print(llvm::raw_ostream &OS) const {
  OS << "<CGRecordLayout\n";
  OS << " LLVMType:" << *CompleteObjectType << "\n";
  if (BaseSubobjectType)
    OS << " NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n";
  OS << " IsZeroInitializable:" << IsZeroInitializable << "\n";
  OS << " BitFields:[\n";

  // Print bit-field infos in declaration order.
  std::vector<std::pair<unsigned, const CGBitFieldInfo*> > BFIs;
  for (llvm::DenseMap<const FieldDecl*, CGBitFieldInfo>::const_iterator
         it = BitFields.begin(), ie = BitFields.end();
       it != ie; ++it) {
    const RecordDecl *RD = it->first->getParent();
    unsigned Index = 0;
    for (RecordDecl::field_iterator
           it2 = RD->field_begin(); *it2 != it->first; ++it2)
      ++Index;
    BFIs.push_back(std::make_pair(Index, &it->second));
  }
  llvm::array_pod_sort(BFIs.begin(), BFIs.end());
  for (unsigned i = 0, e = BFIs.size(); i != e; ++i) {
    OS.indent(4);
    BFIs[i].second->print(OS);
    OS << "\n";
  }

  OS << "]>\n";
}

void CGRecordLayout::dump() const {
  print(llvm::errs());
}

void CGBitFieldInfo::print(llvm::raw_ostream &OS) const {
  OS << "<CGBitFieldInfo";
  OS << " Size:" << Size;
  OS << " IsSigned:" << IsSigned << "\n";

  OS.indent(4 + strlen("<CGBitFieldInfo"));
  OS << " NumComponents:" << getNumComponents();
  OS << " Components: [";
  if (getNumComponents()) {
    OS << "\n";
    for (unsigned i = 0, e = getNumComponents(); i != e; ++i) {
      const AccessInfo &AI = getComponent(i);
      OS.indent(8);
      OS << "<AccessInfo"
         << " FieldIndex:" << AI.FieldIndex
         << " FieldByteOffset:" << AI.FieldByteOffset
         << " FieldBitStart:" << AI.FieldBitStart
         << " AccessWidth:" << AI.AccessWidth << "\n";
      OS.indent(8 + strlen("<AccessInfo"));
      OS << " AccessAlignment:" << AI.AccessAlignment
         << " TargetBitOffset:" << AI.TargetBitOffset
         << " TargetBitWidth:" << AI.TargetBitWidth
         << ">\n";
    }
    OS.indent(4);
  }
  OS << "]>";
}

void CGBitFieldInfo::dump() const {
  print(llvm::errs());
}