//===--- CGRecordLayoutBuilder.cpp - CGRecordLayout builder ----*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Builder implementation for CGRecordLayout objects.
//
//===----------------------------------------------------------------------===//

#include "CGRecordLayout.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "CodeGenTypes.h"
#include "CGCXXABI.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Type.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
using namespace clang;
using namespace CodeGen;

namespace {

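/// CGRecordLayoutBuilder - Helper class that builds the LLVM struct type and
/// the field, bit-field and base mappings that make up a CGRecordLayout for a
/// given RecordDecl.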
class CGRecordLayoutBuilder {
public:
  /// FieldTypes - Holds the LLVM types that the struct is created from.
  ///
  SmallVector<llvm::Type *, 16> FieldTypes;

  /// BaseSubobjectType - Holds the LLVM type for the non-virtual part
  /// of the struct. For example, consider:
  ///
  /// struct A { int i; };
  /// struct B { void *v; };
  /// struct C : virtual A, B { };
  ///
  /// The LLVM type of C will be
  /// %struct.C = type { i32 (...)**, %struct.A, i32, %struct.B }
  ///
  /// And the LLVM type of the non-virtual base struct will be
  /// %struct.C.base = type { i32 (...)**, %struct.A, i32 }
  ///
  /// This only gets initialized if the base subobject type is
  /// different from the complete-object type.
  llvm::StructType *BaseSubobjectType;

  /// Fields - Maps each FieldDecl to its corresponding LLVM field number.
  llvm::DenseMap<const FieldDecl *, unsigned> Fields;

  /// BitFields - Holds location and size information for each bit field.
  llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;

  llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;

  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
  /// primary base classes for some other direct or indirect base class.
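  ///
  /// For example (illustrative):
  ///
  ///   struct A { virtual void f(); };
  ///   struct B : virtual A { };
  ///   struct C : B { };
  ///
  /// A is a nearly-empty virtual base and becomes the primary base of B, so
  /// when laying out C, A is an indirect primary base and must not be laid
  /// out again as a separate virtual base.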
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;

  /// LaidOutVirtualBases - A set of all laid out virtual bases, used to avoid
  /// laying out virtual bases more than once.
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> LaidOutVirtualBases;

  /// IsZeroInitializable - Whether this struct can be C++
  /// zero-initialized with an LLVM zeroinitializer.
  bool IsZeroInitializable;
  bool IsZeroInitializableAsBase;

  /// Packed - Whether the resulting LLVM struct will be packed or not.
  bool Packed;

  /// IsMsStruct - Whether ms_struct is in effect or not
  bool IsMsStruct;

private:
  CodeGenTypes &Types;

  /// LastLaidOutBaseInfo - Contains the offset and non-virtual size of the
  /// last base laid out. Used so that we can replace the last laid out base
  /// type with an i8 array if needed.
  struct LastLaidOutBaseInfo {
    CharUnits Offset;
    CharUnits NonVirtualSize;

    bool isValid() const { return !NonVirtualSize.isZero(); }
    void invalidate() { NonVirtualSize = CharUnits::Zero(); }

  } LastLaidOutBase;

  /// Alignment - Contains the alignment of the RecordDecl.
  CharUnits Alignment;

  /// BitsAvailableInLastField - If a bit field spans only part of an LLVM
  /// field, this will have the number of bits still available in the field.
  char BitsAvailableInLastField;

  /// NextFieldOffset - Holds the next field offset.
  CharUnits NextFieldOffset;

  /// LayoutUnionField - Will lay out a field in a union and return the type
  /// that the field will have.
  llvm::Type *LayoutUnionField(const FieldDecl *Field,
                               const ASTRecordLayout &Layout);

  /// LayoutUnion - Will lay out a union RecordDecl.
  void LayoutUnion(const RecordDecl *D);

  /// LayoutFields - try to lay out all fields in the record decl.
  /// Returns false if the operation failed because the struct is not packed.
  bool LayoutFields(const RecordDecl *D);

  /// Lay out a single base, virtual or non-virtual.
  bool LayoutBase(const CXXRecordDecl *base,
                  const CGRecordLayout &baseLayout,
                  CharUnits baseOffset);

  /// LayoutVirtualBase - lay out a single virtual base.
  bool LayoutVirtualBase(const CXXRecordDecl *base,
                         CharUnits baseOffset);

  /// LayoutVirtualBases - lay out the virtual bases of a record decl.
  bool LayoutVirtualBases(const CXXRecordDecl *RD,
                          const ASTRecordLayout &Layout);

  /// MSLayoutVirtualBases - lay out the virtual bases of a record decl,
  /// the way MSVC does.
  bool MSLayoutVirtualBases(const CXXRecordDecl *RD,
                            const ASTRecordLayout &Layout);

  /// LayoutNonVirtualBase - lay out a single non-virtual base.
  bool LayoutNonVirtualBase(const CXXRecordDecl *base,
                            CharUnits baseOffset);

  /// LayoutNonVirtualBases - lay out the non-virtual bases of a record decl.
  bool LayoutNonVirtualBases(const CXXRecordDecl *RD,
                             const ASTRecordLayout &Layout);

  /// ComputeNonVirtualBaseType - Compute the non-virtual base field types.
  bool ComputeNonVirtualBaseType(const CXXRecordDecl *RD);

  /// LayoutField - lay out a single field. Returns false if the operation
  /// failed because the current struct is not packed.
  bool LayoutField(const FieldDecl *D, uint64_t FieldOffset);

  /// LayoutBitField - lay out a single bit field.
  void LayoutBitField(const FieldDecl *D, uint64_t FieldOffset);

  /// AppendField - Appends a field with the given offset and type.
  void AppendField(CharUnits fieldOffset, llvm::Type *FieldTy);

  /// AppendPadding - Appends enough padding bytes so that the total
  /// struct size is a multiple of the field alignment.
  void AppendPadding(CharUnits fieldOffset, CharUnits fieldAlignment);

  /// ResizeLastBaseFieldIfNecessary - Fields and bases can be laid out in the
  /// tail padding of a previous base. If this happens, the type of the previous
  /// base needs to be changed to an array of i8. Returns true if the last
  /// laid out base was resized.
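  ///
  /// For example (illustrative, assuming a common 64-bit Itanium C++ ABI
  /// target):
  ///
  ///   struct A { virtual ~A(); int i; char c; };
  ///   struct B : A { char d; };
  ///
  /// 'd' is placed at offset 13, inside A's tail padding, so when laying out
  /// B the field appended for the A base subobject has to be shrunk to an i8
  /// array in order to make room for 'd'.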
  bool ResizeLastBaseFieldIfNecessary(CharUnits offset);

  /// getByteArrayType - Returns a byte array type with the given number of
  /// elements.
  llvm::Type *getByteArrayType(CharUnits NumBytes);

  /// AppendBytes - Append a given number of bytes to the record.
  void AppendBytes(CharUnits numBytes);

  /// AppendTailPadding - Append enough tail padding so that the type will have
  /// the passed size.
  void AppendTailPadding(CharUnits RecordSize);

  CharUnits getTypeAlignment(llvm::Type *Ty) const;

  /// getAlignmentAsLLVMStruct - Returns the maximum alignment of all the
  /// LLVM element types.
  CharUnits getAlignmentAsLLVMStruct() const;

  /// CheckZeroInitializable - Check if the given type contains a pointer
  /// to a data member.
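  ///
  /// For example, under the Itanium C++ ABI the null pointer to data member
  /// is represented as -1 rather than 0, so a struct such as
  ///
  ///   struct S { int S::*p; };
  ///
  /// cannot be zero-initialized with an LLVM zeroinitializer.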
  void CheckZeroInitializable(QualType T);

public:
  CGRecordLayoutBuilder(CodeGenTypes &Types)
    : BaseSubobjectType(0),
      IsZeroInitializable(true), IsZeroInitializableAsBase(true),
      Packed(false), IsMsStruct(false),
      Types(Types), BitsAvailableInLastField(0) { }

  /// Layout - Will lay out a RecordDecl.
  void Layout(const RecordDecl *D);
};

}

void CGRecordLayoutBuilder::Layout(const RecordDecl *D) {
  Alignment = Types.getContext().getASTRecordLayout(D).getAlignment();
  Packed = D->hasAttr<PackedAttr>();

  IsMsStruct = D->hasAttr<MsStructAttr>();

  if (D->isUnion()) {
    LayoutUnion(D);
    return;
  }

  if (LayoutFields(D))
    return;

  // We weren't able to lay out the struct. Try again with a packed struct.
  Packed = true;
  LastLaidOutBase.invalidate();
  NextFieldOffset = CharUnits::Zero();
  FieldTypes.clear();
  Fields.clear();
  BitFields.clear();
  NonVirtualBases.clear();
  VirtualBases.clear();

  LayoutFields(D);
}

CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD,
                                        uint64_t FieldOffset,
                                        uint64_t FieldSize,
                                        uint64_t ContainingTypeSizeInBits,
                                        unsigned ContainingTypeAlign) {
  llvm::Type *Ty = Types.ConvertTypeForMem(FD->getType());
  CharUnits TypeSizeInBytes =
    CharUnits::fromQuantity(Types.getTargetData().getTypeAllocSize(Ty));
  uint64_t TypeSizeInBits = Types.getContext().toBits(TypeSizeInBytes);

  bool IsSigned = FD->getType()->isSignedIntegerOrEnumerationType();

  if (FieldSize > TypeSizeInBits) {
    // We have a wide bit-field. The extra bits are only used for padding, so
    // if we have a bitfield of type T, with size N:
    //
    //   T t : N;
    //
    // We can just assume that it's:
    //
    //   T t : sizeof(T);
    //
    FieldSize = TypeSizeInBits;
  }

  // On big-endian machines the first fields are in higher bit positions, so
  // reverse the bit offset here. The byte offsets are reversed back later.
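  // For example, in a 32-bit container a field at bit offset 0 with size 8
  // becomes offset 32 - 0 - 8 == 24.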
  if (Types.getTargetData().isBigEndian()) {
    FieldOffset = ((ContainingTypeSizeInBits)-FieldOffset-FieldSize);
  }

  // Compute the access components. The policy we use is to start by attempting
  // to access using the width of the bit-field type itself and to always access
  // at aligned indices of that type. If such an access would fail because it
  // extends past the bound of the type, then we reduce size to the next smaller
  // power of two and retry. The current algorithm assumes pow2 sized types,
  // although this is easy to fix.
  //
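  // As an illustrative example: a 20-bit bit-field of a 32-bit type at bit
  // offset 0 in a 24-bit container (possible with packed structures or
  // -fno-bitfield-type-align) first narrows the 32-bit access to 16 bits
  // covering bits [0, 16), then narrows again to an 8-bit access covering
  // bits [16, 24), of which 4 bits belong to the bit-field; this produces
  // two access components.
  //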
  assert(llvm::isPowerOf2_32(TypeSizeInBits) && "Unexpected type size!");
  CGBitFieldInfo::AccessInfo Components[3];
  unsigned NumComponents = 0;
  unsigned AccessedTargetBits = 0;       // The number of target bits accessed.
  unsigned AccessWidth = TypeSizeInBits; // The current access width to attempt.

  // If requested, widen the initial bit-field access to be register sized. The
  // theory is that this is most likely to allow multiple accesses into the same
  // structure to be coalesced, and that the backend should be smart enough to
  // narrow the store if no coalescing is ever done.
  //
  // The subsequent code handles aligning these accesses to common boundaries
  // and guarantees that we do not access past the end of the structure.
  if (Types.getCodeGenOpts().UseRegisterSizedBitfieldAccess) {
    if (AccessWidth < Types.getTarget().getRegisterWidth())
      AccessWidth = Types.getTarget().getRegisterWidth();
  }

  // Round down from the field offset to find the first access position that is
  // at an aligned offset of the initial access type.
  uint64_t AccessStart = FieldOffset - (FieldOffset % AccessWidth);

  // Adjust initial access size to fit within record.
  while (AccessWidth > Types.getTarget().getCharWidth() &&
         AccessStart + AccessWidth > ContainingTypeSizeInBits) {
    AccessWidth >>= 1;
    AccessStart = FieldOffset - (FieldOffset % AccessWidth);
  }

  while (AccessedTargetBits < FieldSize) {
    // Check that we can access using a type of this size, without reading off
    // the end of the structure. This can occur with packed structures and
    // -fno-bitfield-type-align, for example.
    if (AccessStart + AccessWidth > ContainingTypeSizeInBits) {
      // If so, reduce access size to the next smaller power-of-two and retry.
      AccessWidth >>= 1;
      assert(AccessWidth >= Types.getTarget().getCharWidth()
             && "Cannot access under byte size!");
      continue;
    }

    // Otherwise, add an access component.

    // First, compute the bits inside this access which are part of the
    // target. We are reading bits [AccessStart, AccessStart + AccessWidth); the
    // intersection with [FieldOffset, FieldOffset + FieldSize) gives the bits
    // in the target that we are reading.
    assert(FieldOffset < AccessStart + AccessWidth && "Invalid access start!");
    assert(AccessStart < FieldOffset + FieldSize && "Invalid access start!");
    uint64_t AccessBitsInFieldStart = std::max(AccessStart, FieldOffset);
    uint64_t AccessBitsInFieldSize =
      std::min(AccessWidth + AccessStart,
               FieldOffset + FieldSize) - AccessBitsInFieldStart;

    assert(NumComponents < 3 && "Unexpected number of components!");
    CGBitFieldInfo::AccessInfo &AI = Components[NumComponents++];
    AI.FieldIndex = 0;
    // FIXME: We still follow the old access pattern of only using the field
    // byte offset. We should switch this once we fix the struct layout to be
    // pretty.

    // On big-endian machines we reversed the bit offset because the first
    // fields are in higher bits. But this also reverses the bytes, so fix
    // that here by reversing the byte offset on big-endian machines.
    if (Types.getTargetData().isBigEndian()) {
      AI.FieldByteOffset = Types.getContext().toCharUnitsFromBits(
        ContainingTypeSizeInBits - AccessStart - AccessWidth);
    } else {
      AI.FieldByteOffset = Types.getContext().toCharUnitsFromBits(AccessStart);
    }
    AI.FieldBitStart = AccessBitsInFieldStart - AccessStart;
    AI.AccessWidth = AccessWidth;
    AI.AccessAlignment = Types.getContext().toCharUnitsFromBits(
      llvm::MinAlign(ContainingTypeAlign, AccessStart));
    AI.TargetBitOffset = AccessedTargetBits;
    AI.TargetBitWidth = AccessBitsInFieldSize;

    AccessStart += AccessWidth;
    AccessedTargetBits += AI.TargetBitWidth;
  }

  assert(AccessedTargetBits == FieldSize && "Invalid bit-field access!");
  return CGBitFieldInfo(FieldSize, NumComponents, Components, IsSigned);
}

CGBitFieldInfo CGBitFieldInfo::MakeInfo(CodeGenTypes &Types,
                                        const FieldDecl *FD,
                                        uint64_t FieldOffset,
                                        uint64_t FieldSize) {
  const RecordDecl *RD = FD->getParent();
  const ASTRecordLayout &RL = Types.getContext().getASTRecordLayout(RD);
  uint64_t ContainingTypeSizeInBits = Types.getContext().toBits(RL.getSize());
  unsigned ContainingTypeAlign = Types.getContext().toBits(RL.getAlignment());

  return MakeInfo(Types, FD, FieldOffset, FieldSize, ContainingTypeSizeInBits,
                  ContainingTypeAlign);
}

void CGRecordLayoutBuilder::LayoutBitField(const FieldDecl *D,
                                           uint64_t fieldOffset) {
  uint64_t fieldSize = D->getBitWidthValue(Types.getContext());

  if (fieldSize == 0)
    return;

  uint64_t nextFieldOffsetInBits = Types.getContext().toBits(NextFieldOffset);
  CharUnits numBytesToAppend;
  unsigned charAlign = Types.getContext().getTargetInfo().getCharAlign();

  if (fieldOffset < nextFieldOffsetInBits && !BitsAvailableInLastField) {
    assert(fieldOffset % charAlign == 0 &&
           "Field offset not aligned correctly");

    CharUnits fieldOffsetInCharUnits =
      Types.getContext().toCharUnitsFromBits(fieldOffset);

    // Try to resize the last base field.
    if (ResizeLastBaseFieldIfNecessary(fieldOffsetInCharUnits))
      nextFieldOffsetInBits = Types.getContext().toBits(NextFieldOffset);
  }

  if (fieldOffset < nextFieldOffsetInBits) {
    assert(BitsAvailableInLastField && "Bitfield size mismatch!");
    assert(!NextFieldOffset.isZero() && "Must have laid out at least one byte");

    // The bitfield begins in the previous bit-field.
    numBytesToAppend = Types.getContext().toCharUnitsFromBits(
      llvm::RoundUpToAlignment(fieldSize - BitsAvailableInLastField,
                               charAlign));
  } else {
    assert(fieldOffset % charAlign == 0 &&
           "Field offset not aligned correctly");

    // Append padding if necessary.
    AppendPadding(Types.getContext().toCharUnitsFromBits(fieldOffset),
                  CharUnits::One());

    numBytesToAppend = Types.getContext().toCharUnitsFromBits(
      llvm::RoundUpToAlignment(fieldSize, charAlign));

    assert(!numBytesToAppend.isZero() && "No bytes to append!");
  }

  // Add the bit field info.
  BitFields.insert(std::make_pair(D,
      CGBitFieldInfo::MakeInfo(Types, D, fieldOffset, fieldSize)));

  AppendBytes(numBytesToAppend);

  BitsAvailableInLastField =
    Types.getContext().toBits(NextFieldOffset) - (fieldOffset + fieldSize);
}

bool CGRecordLayoutBuilder::LayoutField(const FieldDecl *D,
                                        uint64_t fieldOffset) {
  // If the field is packed, then we need a packed struct.
  if (!Packed && D->hasAttr<PackedAttr>())
    return false;

  if (D->isBitField()) {
    // We must use packed structs for unnamed bit fields since they
    // don't affect the struct alignment.
    if (!Packed && !D->getDeclName())
      return false;

    LayoutBitField(D, fieldOffset);
    return true;
  }

  CheckZeroInitializable(D->getType());

  assert(fieldOffset % Types.getTarget().getCharWidth() == 0
         && "field offset is not on a byte boundary!");
  CharUnits fieldOffsetInBytes
    = Types.getContext().toCharUnitsFromBits(fieldOffset);

  llvm::Type *Ty = Types.ConvertTypeForMem(D->getType());
  CharUnits typeAlignment = getTypeAlignment(Ty);

  // If the type alignment is larger than the struct alignment, we must use
  // a packed struct.
  if (typeAlignment > Alignment) {
    assert(!Packed && "Alignment is wrong even with packed struct!");
    return false;
  }

  if (!Packed) {
    if (const RecordType *RT = D->getType()->getAs<RecordType>()) {
      const RecordDecl *RD = cast<RecordDecl>(RT->getDecl());
      if (const MaxFieldAlignmentAttr *MFAA =
            RD->getAttr<MaxFieldAlignmentAttr>()) {
        if (MFAA->getAlignment() != Types.getContext().toBits(typeAlignment))
          return false;
      }
    }
  }

  // Round up the field offset to the alignment of the field type.
  CharUnits alignedNextFieldOffsetInBytes =
    NextFieldOffset.RoundUpToAlignment(typeAlignment);

  if (fieldOffsetInBytes < alignedNextFieldOffsetInBytes) {
    // Try to resize the last base field.
    if (ResizeLastBaseFieldIfNecessary(fieldOffsetInBytes)) {
      alignedNextFieldOffsetInBytes =
        NextFieldOffset.RoundUpToAlignment(typeAlignment);
    }
  }

  if (fieldOffsetInBytes < alignedNextFieldOffsetInBytes) {
    assert(!Packed && "Could not place field even with packed struct!");
    return false;
  }

  AppendPadding(fieldOffsetInBytes, typeAlignment);

  // Now append the field.
  Fields[D] = FieldTypes.size();
  AppendField(fieldOffsetInBytes, Ty);

  LastLaidOutBase.invalidate();
  return true;
}

llvm::Type *
CGRecordLayoutBuilder::LayoutUnionField(const FieldDecl *Field,
                                        const ASTRecordLayout &Layout) {
  if (Field->isBitField()) {
    uint64_t FieldSize = Field->getBitWidthValue(Types.getContext());

    // Ignore zero sized bit fields.
    if (FieldSize == 0)
      return 0;

    llvm::Type *FieldTy = llvm::Type::getInt8Ty(Types.getLLVMContext());
    CharUnits NumBytesToAppend = Types.getContext().toCharUnitsFromBits(
      llvm::RoundUpToAlignment(FieldSize,
                               Types.getContext().getTargetInfo().getCharAlign()));

    if (NumBytesToAppend > CharUnits::One())
      FieldTy = llvm::ArrayType::get(FieldTy, NumBytesToAppend.getQuantity());

    // Add the bit field info.
    BitFields.insert(std::make_pair(Field,
        CGBitFieldInfo::MakeInfo(Types, Field, 0, FieldSize)));
    return FieldTy;
  }

  // This is a regular union field.
  Fields[Field] = 0;
  return Types.ConvertTypeForMem(Field->getType());
}

void CGRecordLayoutBuilder::LayoutUnion(const RecordDecl *D) {
  assert(D->isUnion() && "Can't call LayoutUnion on a non-union record!");

  const ASTRecordLayout &layout = Types.getContext().getASTRecordLayout(D);

  llvm::Type *unionType = 0;
  CharUnits unionSize = CharUnits::Zero();
  CharUnits unionAlign = CharUnits::Zero();

  bool hasOnlyZeroSizedBitFields = true;
  bool checkedFirstFieldZeroInit = false;

  unsigned fieldNo = 0;
  for (RecordDecl::field_iterator field = D->field_begin(),
       fieldEnd = D->field_end(); field != fieldEnd; ++field, ++fieldNo) {
    assert(layout.getFieldOffset(fieldNo) == 0 &&
           "Union field offset did not start at the beginning of record!");
    llvm::Type *fieldType = LayoutUnionField(*field, layout);

    if (!fieldType)
      continue;

    if (field->getDeclName() && !checkedFirstFieldZeroInit) {
      CheckZeroInitializable(field->getType());
      checkedFirstFieldZeroInit = true;
    }

    hasOnlyZeroSizedBitFields = false;

    CharUnits fieldAlign = CharUnits::fromQuantity(
      Types.getTargetData().getABITypeAlignment(fieldType));
    CharUnits fieldSize = CharUnits::fromQuantity(
      Types.getTargetData().getTypeAllocSize(fieldType));

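    // A union is represented in LLVM IR by a single member: keep the candidate
    // with the strictest alignment, breaking ties in favor of the larger size;
    // any remaining bytes are added as tail padding below.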
    if (fieldAlign < unionAlign)
      continue;

    if (fieldAlign > unionAlign || fieldSize > unionSize) {
      unionType = fieldType;
      unionAlign = fieldAlign;
      unionSize = fieldSize;
    }
  }

  // Now add our field.
  if (unionType) {
    AppendField(CharUnits::Zero(), unionType);

    if (getTypeAlignment(unionType) > layout.getAlignment()) {
      // We need a packed struct.
      Packed = true;
      unionAlign = CharUnits::One();
    }
  }
  if (unionAlign.isZero()) {
    (void)hasOnlyZeroSizedBitFields;
    assert(hasOnlyZeroSizedBitFields &&
           "0-align record did not have all zero-sized bit-fields!");
    unionAlign = CharUnits::One();
  }

  // Append tail padding.
  CharUnits recordSize = layout.getSize();
  if (recordSize > unionSize)
    AppendPadding(recordSize, unionAlign);
}

bool CGRecordLayoutBuilder::LayoutBase(const CXXRecordDecl *base,
                                       const CGRecordLayout &baseLayout,
                                       CharUnits baseOffset) {
  ResizeLastBaseFieldIfNecessary(baseOffset);

  AppendPadding(baseOffset, CharUnits::One());

  const ASTRecordLayout &baseASTLayout
    = Types.getContext().getASTRecordLayout(base);

  LastLaidOutBase.Offset = NextFieldOffset;
  LastLaidOutBase.NonVirtualSize = baseASTLayout.getNonVirtualSize();

  llvm::StructType *subobjectType = baseLayout.getBaseSubobjectLLVMType();
  if (getTypeAlignment(subobjectType) > Alignment)
    return false;

  AppendField(baseOffset, subobjectType);
  return true;
}

bool CGRecordLayoutBuilder::LayoutNonVirtualBase(const CXXRecordDecl *base,
                                                 CharUnits baseOffset) {
  // Ignore empty bases.
  if (base->isEmpty()) return true;

  const CGRecordLayout &baseLayout = Types.getCGRecordLayout(base);
  if (IsZeroInitializableAsBase) {
    assert(IsZeroInitializable &&
           "class zero-initializable as base but not as complete object");

    IsZeroInitializable = IsZeroInitializableAsBase =
      baseLayout.isZeroInitializableAsBase();
  }

  if (!LayoutBase(base, baseLayout, baseOffset))
    return false;
  NonVirtualBases[base] = (FieldTypes.size() - 1);
  return true;
}

bool
CGRecordLayoutBuilder::LayoutVirtualBase(const CXXRecordDecl *base,
                                         CharUnits baseOffset) {
  // Ignore empty bases.
  if (base->isEmpty()) return true;

  const CGRecordLayout &baseLayout = Types.getCGRecordLayout(base);
  if (IsZeroInitializable)
    IsZeroInitializable = baseLayout.isZeroInitializableAsBase();

  if (!LayoutBase(base, baseLayout, baseOffset))
    return false;
  VirtualBases[base] = (FieldTypes.size() - 1);
  return true;
}

bool
CGRecordLayoutBuilder::MSLayoutVirtualBases(const CXXRecordDecl *RD,
                                            const ASTRecordLayout &Layout) {
  if (!RD->getNumVBases())
    return true;

  // The vbases list is uniqued and ordered by a depth-first
  // traversal, which is what we need here.
  for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
       E = RD->vbases_end(); I != E; ++I) {

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());

    CharUnits vbaseOffset = Layout.getVBaseClassOffset(BaseDecl);
    if (!LayoutVirtualBase(BaseDecl, vbaseOffset))
      return false;
  }
  return true;
}

/// LayoutVirtualBases - lay out the virtual bases of a record decl.
bool
CGRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
                                          const ASTRecordLayout &Layout) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // We only want to lay out virtual bases that aren't indirect primary bases
    // of some other base.
    if (I->isVirtual() && !IndirectPrimaryBases.count(BaseDecl)) {
      // Only lay out the base once.
      if (!LaidOutVirtualBases.insert(BaseDecl))
        continue;

      CharUnits vbaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      if (!LayoutVirtualBase(BaseDecl, vbaseOffset))
        return false;
    }

    if (!BaseDecl->getNumVBases()) {
      // This base isn't interesting since it doesn't have any virtual bases.
      continue;
    }

    if (!LayoutVirtualBases(BaseDecl, Layout))
      return false;
  }
  return true;
}

bool
CGRecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD,
                                             const ASTRecordLayout &Layout) {
  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();

  // If we have a primary base, lay it out first.
  if (PrimaryBase) {
    if (!Layout.isPrimaryBaseVirtual()) {
      if (!LayoutNonVirtualBase(PrimaryBase, CharUnits::Zero()))
        return false;
    } else {
      if (!LayoutVirtualBase(PrimaryBase, CharUnits::Zero()))
        return false;
    }

  // Otherwise, add a vtable / vf-table if the layout says to do so.
  } else if (Types.getContext().getTargetInfo().getCXXABI() == CXXABI_Microsoft
               ? Layout.getVFPtrOffset() != CharUnits::fromQuantity(-1)
               : RD->isDynamicClass()) {
    llvm::Type *FunctionType =
      llvm::FunctionType::get(llvm::Type::getInt32Ty(Types.getLLVMContext()),
                              /*isVarArg=*/true);
    llvm::Type *VTableTy = FunctionType->getPointerTo();

    assert(NextFieldOffset.isZero() &&
           "VTable pointer must come first!");
    AppendField(CharUnits::Zero(), VTableTy->getPointerTo());
  }

  // Lay out the non-virtual bases.
| 731 | for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(), |
| 732 | E = RD->bases_end(); I != E; ++I) { |
| 733 | if (I->isVirtual()) |
| 734 | continue; |
| 735 | |
| 736 | const CXXRecordDecl *BaseDecl = |
| 737 | cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl()); |
| 738 | |
| 739 | // We've already laid out the primary base. |
Anders Carlsson | 7f95cd1 | 2010-11-24 23:12:57 +0000 | [diff] [blame] | 740 | if (BaseDecl == PrimaryBase && !Layout.isPrimaryBaseVirtual()) |
Anders Carlsson | af9e5af | 2010-05-18 05:12:20 +0000 | [diff] [blame] | 741 | continue; |
| 742 | |
Eli Friedman | 3c840aa | 2011-12-12 23:13:20 +0000 | [diff] [blame] | 743 | if (!LayoutNonVirtualBase(BaseDecl, Layout.getBaseClassOffset(BaseDecl))) |
| 744 | return false; |
Anders Carlsson | d681a29 | 2009-12-16 17:27:20 +0000 | [diff] [blame] | 745 | } |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 746 | |
| 747 | // Add a vb-table pointer if the layout insists. |
| 748 | if (Layout.getVBPtrOffset() != CharUnits::fromQuantity(-1)) { |
| 749 | CharUnits VBPtrOffset = Layout.getVBPtrOffset(); |
| 750 | llvm::Type *Vbptr = llvm::Type::getInt32PtrTy(Types.getLLVMContext()); |
| 751 | AppendPadding(VBPtrOffset, getTypeAlignment(Vbptr)); |
| 752 | AppendField(VBPtrOffset, Vbptr); |
| 753 | } |
Eli Friedman | 3c840aa | 2011-12-12 23:13:20 +0000 | [diff] [blame] | 754 | |
| 755 | return true; |
Anders Carlsson | d681a29 | 2009-12-16 17:27:20 +0000 | [diff] [blame] | 756 | } |
| 757 | |
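| | /// ComputeNonVirtualBaseType - Builds the LLVM type used when this class is
| | /// a non-virtual base subobject of some other class; it is named with a
| | /// ".base" suffix (e.g. "%struct.Foo.base" for a hypothetical %struct.Foo).
| | /// If the aligned non-virtual size already matches the complete-object size,
| | /// the complete type is reused and no separate type is created. Returns
| | /// false if the base type could only be produced as a packed struct.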
Argyrios Kyrtzidis | 648fcbe | 2010-12-10 00:11:00 +0000 | [diff] [blame] | 758 | bool |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 759 | CGRecordLayoutBuilder::ComputeNonVirtualBaseType(const CXXRecordDecl *RD) { |
| 760 | const ASTRecordLayout &Layout = Types.getContext().getASTRecordLayout(RD); |
| 761 | |
Ken Dyck | bec0285 | 2011-02-08 02:02:47 +0000 | [diff] [blame] | 762 | CharUnits NonVirtualSize = Layout.getNonVirtualSize(); |
| 763 | CharUnits NonVirtualAlign = Layout.getNonVirtualAlign(); |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 764 | CharUnits AlignedNonVirtualTypeSize = |
| 765 | NonVirtualSize.RoundUpToAlignment(NonVirtualAlign); |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 766 | |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 767 | // First check if we can use the same fields as for the complete class. |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 768 | CharUnits RecordSize = Layout.getSize(); |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 769 | if (AlignedNonVirtualTypeSize == RecordSize) |
Argyrios Kyrtzidis | 648fcbe | 2010-12-10 00:11:00 +0000 | [diff] [blame] | 770 | return true; |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 771 | |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 772 | // Check if we need padding. |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 773 | CharUnits AlignedNextFieldOffset = |
| 774 | NextFieldOffset.RoundUpToAlignment(getAlignmentAsLLVMStruct()); |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 775 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 776 | if (AlignedNextFieldOffset > AlignedNonVirtualTypeSize) { |
| 777 | assert(!Packed && "cannot lay out even as a packed struct");
Argyrios Kyrtzidis | 648fcbe | 2010-12-10 00:11:00 +0000 | [diff] [blame] | 778 | return false; // Needs packing. |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 779 | } |
| 780 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 781 | bool needsPadding = (AlignedNonVirtualTypeSize != AlignedNextFieldOffset); |
| 782 | if (needsPadding) { |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 783 | CharUnits NumBytes = AlignedNonVirtualTypeSize - AlignedNextFieldOffset; |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 784 | FieldTypes.push_back(getByteArrayType(NumBytes)); |
| 785 | } |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 786 | |
Chris Lattner | 5ec04a5 | 2011-08-12 17:43:31 +0000 | [diff] [blame] | 787 | BaseSubobjectType = llvm::StructType::create(Types.getLLVMContext(), |
| 788 | FieldTypes, "", Packed); |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 789 | Types.addRecordTypeName(RD, BaseSubobjectType, ".base"); |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 790 | |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 791 | // Pull the padding back off. |
| 792 | if (needsPadding) |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 793 | FieldTypes.pop_back(); |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 794 | |
Argyrios Kyrtzidis | 648fcbe | 2010-12-10 00:11:00 +0000 | [diff] [blame] | 795 | return true; |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 796 | } |
| 797 | |
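| | /// LayoutFields - Performs the complete-object layout for D: non-virtual
| | /// bases (for C++ classes), then the fields themselves, then the non-virtual
| | /// base subobject type, the virtual bases (via the MS-specific path under
| | /// the Microsoft C++ ABI), and finally any tail padding. Returns false if
| | /// the record cannot be laid out without a packed LLVM struct.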
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 798 | bool CGRecordLayoutBuilder::LayoutFields(const RecordDecl *D) { |
| 799 | assert(!D->isUnion() && "Can't call LayoutFields on a union!"); |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 800 | assert(!Alignment.isZero() && "Did not set alignment!"); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 801 | |
Anders Carlsson | 697f659 | 2009-07-23 03:43:54 +0000 | [diff] [blame] | 802 | const ASTRecordLayout &Layout = Types.getContext().getASTRecordLayout(D); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 803 | |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 804 | const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D); |
| 805 | if (RD) |
Eli Friedman | 3c840aa | 2011-12-12 23:13:20 +0000 | [diff] [blame] | 806 | if (!LayoutNonVirtualBases(RD, Layout)) |
| 807 | return false; |
Daniel Dunbar | 23ee4b7 | 2010-03-31 00:11:27 +0000 | [diff] [blame] | 808 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 809 | unsigned FieldNo = 0; |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 810 | const FieldDecl *LastFD = 0; |
| 811 | |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 812 | for (RecordDecl::field_iterator Field = D->field_begin(), |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 813 | FieldEnd = D->field_end(); Field != FieldEnd; ++Field, ++FieldNo) { |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 814 | if (IsMsStruct) { |
| 815 | // Zero-length bitfields following non-bitfield members are |
| 816 | // ignored: |
| 817 | const FieldDecl *FD = (*Field); |
Fariborz Jahanian | fc0fe6e | 2011-05-03 20:21:04 +0000 | [diff] [blame] | 818 | if (Types.getContext().ZeroBitfieldFollowsNonBitfield(FD, LastFD)) { |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 819 | --FieldNo; |
| 820 | continue; |
| 821 | } |
| 822 | LastFD = FD; |
| 823 | } |
| 824 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 825 | if (!LayoutField(*Field, Layout.getFieldOffset(FieldNo))) { |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 826 | assert(!Packed && |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 827 | "Could not lay out fields even with a packed LLVM struct!");
| 828 | return false; |
| 829 | } |
| 830 | } |
| 831 | |
Anders Carlsson | 1f95ee3 | 2010-11-25 01:59:35 +0000 | [diff] [blame] | 832 | if (RD) { |
Anders Carlsson | a459adb | 2010-11-28 19:18:44 +0000 | [diff] [blame] | 833 | // We've laid out the non-virtual bases and the fields; now compute the
| 834 | // non-virtual base subobject type.
Argyrios Kyrtzidis | 648fcbe | 2010-12-10 00:11:00 +0000 | [diff] [blame] | 835 | if (!ComputeNonVirtualBaseType(RD)) { |
| 836 | assert(!Packed && "Could not lay out even with a packed LLVM struct!");
| 837 | return false; |
| 838 | } |
Anders Carlsson | 1f95ee3 | 2010-11-25 01:59:35 +0000 | [diff] [blame] | 839 | |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 840 | // Lay out the virtual bases. The MS ABI uses a different |
| 841 | // algorithm here due to the lack of primary virtual bases. |
| 842 | if (Types.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) { |
| 843 | RD->getIndirectPrimaryBases(IndirectPrimaryBases); |
| 844 | if (Layout.isPrimaryBaseVirtual()) |
| 845 | IndirectPrimaryBases.insert(Layout.getPrimaryBase()); |
| 846 | |
Eli Friedman | 3c840aa | 2011-12-12 23:13:20 +0000 | [diff] [blame] | 847 | if (!LayoutVirtualBases(RD, Layout)) |
| 848 | return false; |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 849 | } else { |
Eli Friedman | 3c840aa | 2011-12-12 23:13:20 +0000 | [diff] [blame] | 850 | if (!MSLayoutVirtualBases(RD, Layout)) |
| 851 | return false; |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 852 | } |
Anders Carlsson | a459adb | 2010-11-28 19:18:44 +0000 | [diff] [blame] | 853 | } |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 854 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 855 | // Append tail padding if necessary. |
Ken Dyck | 272b6fa | 2011-04-24 16:53:44 +0000 | [diff] [blame] | 856 | AppendTailPadding(Layout.getSize()); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 857 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 858 | return true; |
| 859 | } |
| 860 | |
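| | /// AppendTailPadding - Pads the struct out to the given AST record size,
| | /// first shrinking the last laid-out base field if its LLVM type would
| | /// extend past that size.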
Ken Dyck | 272b6fa | 2011-04-24 16:53:44 +0000 | [diff] [blame] | 861 | void CGRecordLayoutBuilder::AppendTailPadding(CharUnits RecordSize) { |
| 862 | ResizeLastBaseFieldIfNecessary(RecordSize); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 863 | |
Ken Dyck | 272b6fa | 2011-04-24 16:53:44 +0000 | [diff] [blame] | 864 | assert(NextFieldOffset <= RecordSize && "Size mismatch!"); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 865 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 866 | CharUnits AlignedNextFieldOffset = |
| 867 | NextFieldOffset.RoundUpToAlignment(getAlignmentAsLLVMStruct()); |
Anders Carlsson | 220bf4f | 2009-12-08 01:24:23 +0000 | [diff] [blame] | 868 | |
Ken Dyck | 272b6fa | 2011-04-24 16:53:44 +0000 | [diff] [blame] | 869 | if (AlignedNextFieldOffset == RecordSize) { |
Anders Carlsson | 220bf4f | 2009-12-08 01:24:23 +0000 | [diff] [blame] | 870 | // We don't need any padding. |
| 871 | return; |
| 872 | } |
Daniel Dunbar | 23ee4b7 | 2010-03-31 00:11:27 +0000 | [diff] [blame] | 873 | |
Ken Dyck | 272b6fa | 2011-04-24 16:53:44 +0000 | [diff] [blame] | 874 | CharUnits NumPadBytes = RecordSize - NextFieldOffset; |
Anders Carlsson | b97a3ec | 2009-07-27 14:55:54 +0000 | [diff] [blame] | 875 | AppendBytes(NumPadBytes); |
| 876 | } |
| 877 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 878 | void CGRecordLayoutBuilder::AppendField(CharUnits fieldOffset, |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 879 | llvm::Type *fieldType) { |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 880 | CharUnits fieldSize = |
| 881 | CharUnits::fromQuantity(Types.getTargetData().getTypeAllocSize(fieldType)); |
Anders Carlsson | 6e853bf | 2009-07-24 02:45:50 +0000 | [diff] [blame] | 882 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 883 | FieldTypes.push_back(fieldType); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 884 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 885 | NextFieldOffset = fieldOffset + fieldSize; |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 886 | BitsAvailableInLastField = 0; |
| 887 | } |
| 888 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 889 | void CGRecordLayoutBuilder::AppendPadding(CharUnits fieldOffset, |
| 890 | CharUnits fieldAlignment) { |
| 891 | assert(NextFieldOffset <= fieldOffset && |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 892 | "Incorrect field layout!"); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 893 | |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 894 | // Do nothing if we're already at the right offset. |
| 895 | if (fieldOffset == NextFieldOffset) return; |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 896 | |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 897 | // If we're not emitting a packed LLVM type, try to avoid adding |
| 898 | // unnecessary padding fields. |
| 899 | if (!Packed) { |
| 900 | // Round up the field offset to the alignment of the field type. |
| 901 | CharUnits alignedNextFieldOffset = |
| 902 | NextFieldOffset.RoundUpToAlignment(fieldAlignment); |
| 903 | assert(alignedNextFieldOffset <= fieldOffset); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 904 | |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 905 | // If that's the right offset, we're done. |
| 906 | if (alignedNextFieldOffset == fieldOffset) return; |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 907 | } |
John McCall | 0153cd3 | 2011-11-08 04:01:03 +0000 | [diff] [blame] | 908 | |
| 909 | // Otherwise we need explicit padding. |
| 910 | CharUnits padding = fieldOffset - NextFieldOffset; |
| 911 | AppendBytes(padding); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 912 | } |
| 913 | |
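| | /// ResizeLastBaseFieldIfNecessary - If 'offset' falls inside the storage of
| | /// the most recently laid-out base, replace that base's field with an i8
| | /// array covering only its non-virtual size, so that whatever lands at
| | /// 'offset' can reuse the base's tail padding. Returns true if a resize
| | /// was performed.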
Anders Carlsson | fcaaa69 | 2011-04-17 21:56:13 +0000 | [diff] [blame] | 914 | bool CGRecordLayoutBuilder::ResizeLastBaseFieldIfNecessary(CharUnits offset) { |
| 915 | // Check if we have a base to resize. |
| 916 | if (!LastLaidOutBase.isValid()) |
| 917 | return false; |
| 918 | |
| 919 | // This offset does not overlap with the tail padding. |
| 920 | if (offset >= NextFieldOffset) |
| 921 | return false; |
| 922 | |
| 923 | // Restore the field offset and append an i8 array instead. |
| 924 | FieldTypes.pop_back(); |
| 925 | NextFieldOffset = LastLaidOutBase.Offset; |
| 926 | AppendBytes(LastLaidOutBase.NonVirtualSize); |
| 927 | LastLaidOutBase.invalidate(); |
| 928 | |
| 929 | return true; |
| 930 | } |
| 931 | |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 932 | llvm::Type *CGRecordLayoutBuilder::getByteArrayType(CharUnits numBytes) { |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 933 | assert(!numBytes.isZero() && "Empty byte arrays aren't allowed."); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 934 | |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 935 | llvm::Type *Ty = llvm::Type::getInt8Ty(Types.getLLVMContext()); |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 936 | if (numBytes > CharUnits::One()) |
| 937 | Ty = llvm::ArrayType::get(Ty, numBytes.getQuantity()); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 938 | |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 939 | return Ty; |
| 940 | } |
| 941 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 942 | void CGRecordLayoutBuilder::AppendBytes(CharUnits numBytes) { |
| 943 | if (numBytes.isZero()) |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 944 | return; |
| 945 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 946 | // Append the padding field.
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 947 | AppendField(NextFieldOffset, getByteArrayType(numBytes)); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 948 | } |
| 949 | |
Chris Lattner | 2192fe5 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 950 | CharUnits CGRecordLayoutBuilder::getTypeAlignment(llvm::Type *Ty) const { |
Anders Carlsson | d78fc89 | 2009-07-23 17:24:40 +0000 | [diff] [blame] | 951 | if (Packed) |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 952 | return CharUnits::One(); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 953 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 954 | return CharUnits::fromQuantity(Types.getTargetData().getABITypeAlignment(Ty)); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 955 | } |
| 956 | |
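| | /// getAlignmentAsLLVMStruct - Returns the alignment LLVM will give a struct
| | /// made of the fields added so far (always one byte when packed).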
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 957 | CharUnits CGRecordLayoutBuilder::getAlignmentAsLLVMStruct() const { |
Anders Carlsson | acf877b | 2010-11-28 23:06:23 +0000 | [diff] [blame] | 958 | if (Packed) |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 959 | return CharUnits::One(); |
Anders Carlsson | acf877b | 2010-11-28 23:06:23 +0000 | [diff] [blame] | 960 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 961 | CharUnits maxAlignment = CharUnits::One(); |
Anders Carlsson | acf877b | 2010-11-28 23:06:23 +0000 | [diff] [blame] | 962 | for (size_t i = 0; i != FieldTypes.size(); ++i) |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 963 | maxAlignment = std::max(maxAlignment, getTypeAlignment(FieldTypes[i])); |
Anders Carlsson | acf877b | 2010-11-28 23:06:23 +0000 | [diff] [blame] | 964 | |
John McCall | 4d9f142 | 2011-02-15 22:21:29 +0000 | [diff] [blame] | 965 | return maxAlignment; |
Anders Carlsson | acf877b | 2010-11-28 23:06:23 +0000 | [diff] [blame] | 966 | } |
| 967 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 968 | /// Merge in whether a field of the given type is zero-initializable. |
John McCall | 614dbdc | 2010-08-22 21:01:12 +0000 | [diff] [blame] | 969 | void CGRecordLayoutBuilder::CheckZeroInitializable(QualType T) { |
Anders Carlsson | d606de7 | 2009-08-23 01:25:01 +0000 | [diff] [blame] | 970 | // Bail out early if this record is already known not to be zero-initializable.
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 971 | if (!IsZeroInitializableAsBase) |
Anders Carlsson | d606de7 | 2009-08-23 01:25:01 +0000 | [diff] [blame] | 972 | return; |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 973 | |
Anders Carlsson | d606de7 | 2009-08-23 01:25:01 +0000 | [diff] [blame] | 974 | // Can only have member pointers if we're compiling C++. |
David Blaikie | bbafb8a | 2012-03-11 07:00:24 +0000 | [diff] [blame^] | 975 | if (!Types.getContext().getLangOpts().CPlusPlus) |
Anders Carlsson | d606de7 | 2009-08-23 01:25:01 +0000 | [diff] [blame] | 976 | return; |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 977 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 978 | const Type *elementType = T->getBaseElementTypeUnsafe(); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 979 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 980 | if (const MemberPointerType *MPT = elementType->getAs<MemberPointerType>()) { |
John McCall | 614dbdc | 2010-08-22 21:01:12 +0000 | [diff] [blame] | 981 | if (!Types.getCXXABI().isZeroInitializable(MPT)) |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 982 | IsZeroInitializable = IsZeroInitializableAsBase = false; |
| 983 | } else if (const RecordType *RT = elementType->getAs<RecordType>()) { |
Anders Carlsson | e8bfe41 | 2010-02-02 05:17:25 +0000 | [diff] [blame] | 984 | const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl()); |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 985 | const CGRecordLayout &Layout = Types.getCGRecordLayout(RD); |
| 986 | if (!Layout.isZeroInitializable()) |
| 987 | IsZeroInitializable = IsZeroInitializableAsBase = false; |
Daniel Dunbar | 23ee4b7 | 2010-03-31 00:11:27 +0000 | [diff] [blame] | 988 | } |
Anders Carlsson | d606de7 | 2009-08-23 01:25:01 +0000 | [diff] [blame] | 989 | } |
| 990 | |
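| | /// ComputeRecordLayout - Runs the builder over D, installs the resulting
| | /// field list as the body of Ty, picks the base subobject type for C++
| | /// classes, and packages the field, base and bit-field mappings into a new
| | /// CGRecordLayout. In builds with assertions enabled, the result is
| | /// cross-checked against the AST layout below.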
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 991 | CGRecordLayout *CodeGenTypes::ComputeRecordLayout(const RecordDecl *D, |
| 992 | llvm::StructType *Ty) { |
Daniel Dunbar | 23ee4b7 | 2010-03-31 00:11:27 +0000 | [diff] [blame] | 993 | CGRecordLayoutBuilder Builder(*this); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 994 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 995 | Builder.Layout(D); |
Anders Carlsson | e1d5ca5 | 2009-07-24 15:20:52 +0000 | [diff] [blame] | 996 | |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 997 | Ty->setBody(Builder.FieldTypes, Builder.Packed); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 998 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 999 | // If we're in C++, compute the base subobject type. |
Chris Lattner | a5f58b0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 1000 | llvm::StructType *BaseTy = 0; |
Eli Friedman | 09d272d | 2012-01-13 03:58:31 +0000 | [diff] [blame] | 1001 | if (isa<CXXRecordDecl>(D) && !D->isUnion()) { |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1002 | BaseTy = Builder.BaseSubobjectType; |
| 1003 | if (!BaseTy) BaseTy = Ty; |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 1004 | } |
| 1005 | |
Daniel Dunbar | 034299e | 2010-03-31 01:09:11 +0000 | [diff] [blame] | 1006 | CGRecordLayout *RL = |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1007 | new CGRecordLayout(Ty, BaseTy, Builder.IsZeroInitializable, |
| 1008 | Builder.IsZeroInitializableAsBase); |
Daniel Dunbar | 034299e | 2010-03-31 01:09:11 +0000 | [diff] [blame] | 1009 | |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1010 | RL->NonVirtualBases.swap(Builder.NonVirtualBases); |
| 1011 | RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases); |
Anders Carlsson | 061ca52 | 2010-05-18 05:22:06 +0000 | [diff] [blame] | 1012 | |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 1013 | // Add all the field numbers. |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1014 | RL->FieldInfo.swap(Builder.Fields); |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 1015 | |
| 1016 | // Add bitfield info. |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1017 | RL->BitFields.swap(Builder.BitFields); |
Mike Stump | 11289f4 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1018 | |
Daniel Dunbar | 2ea5183 | 2010-04-19 20:44:47 +0000 | [diff] [blame] | 1019 | // Dump the layout, if requested. |
David Blaikie | bbafb8a | 2012-03-11 07:00:24 +0000 | [diff] [blame^] | 1020 | if (getContext().getLangOpts().DumpRecordLayouts) { |
Daniel Dunbar | ccabe48 | 2010-04-19 20:44:53 +0000 | [diff] [blame] | 1021 | llvm::errs() << "\n*** Dumping IRgen Record Layout\n"; |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1022 | llvm::errs() << "Record: "; |
| 1023 | D->dump(); |
| 1024 | llvm::errs() << "\nLayout: "; |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1025 | RL->dump(); |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1026 | } |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1027 | |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1028 | #ifndef NDEBUG |
Daniel Dunbar | 2ea5183 | 2010-04-19 20:44:47 +0000 | [diff] [blame] | 1029 | // Verify that the computed LLVM struct size matches the AST layout size. |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 1030 | const ASTRecordLayout &Layout = getContext().getASTRecordLayout(D); |
| 1031 | |
Ken Dyck | b0fcc59 | 2011-02-11 01:54:29 +0000 | [diff] [blame] | 1032 | uint64_t TypeSizeInBits = getContext().toBits(Layout.getSize()); |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1033 | assert(TypeSizeInBits == getTargetData().getTypeAllocSizeInBits(Ty) && |
Daniel Dunbar | 2ea5183 | 2010-04-19 20:44:47 +0000 | [diff] [blame] | 1034 | "Type size mismatch!"); |
| 1035 | |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 1036 | if (BaseTy) { |
Ken Dyck | bec0285 | 2011-02-08 02:02:47 +0000 | [diff] [blame] | 1037 | CharUnits NonVirtualSize = Layout.getNonVirtualSize(); |
| 1038 | CharUnits NonVirtualAlign = Layout.getNonVirtualAlign(); |
| 1039 | CharUnits AlignedNonVirtualTypeSize = |
| 1040 | NonVirtualSize.RoundUpToAlignment(NonVirtualAlign); |
| 1041 | |
| 1042 | uint64_t AlignedNonVirtualTypeSizeInBits = |
Ken Dyck | b0fcc59 | 2011-02-11 01:54:29 +0000 | [diff] [blame] | 1043 | getContext().toBits(AlignedNonVirtualTypeSize); |
Anders Carlsson | c1351ca | 2010-11-09 05:25:47 +0000 | [diff] [blame] | 1044 | |
| 1045 | assert(AlignedNonVirtualTypeSizeInBits == |
| 1046 | getTargetData().getTypeAllocSizeInBits(BaseTy) && |
| 1047 | "Type size mismatch!"); |
| 1048 | } |
| 1049 | |
Daniel Dunbar | 2ba6744 | 2010-04-21 19:10:49 +0000 | [diff] [blame] | 1050 | // Verify that the LLVM and AST field offsets agree. |
Chris Lattner | 2192fe5 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1051 | llvm::StructType *ST = |
Daniel Dunbar | 2ba6744 | 2010-04-21 19:10:49 +0000 | [diff] [blame] | 1052 | dyn_cast<llvm::StructType>(RL->getLLVMType()); |
| 1053 | const llvm::StructLayout *SL = getTargetData().getStructLayout(ST); |
| 1054 | |
| 1055 | const ASTRecordLayout &AST_RL = getContext().getASTRecordLayout(D); |
| 1056 | RecordDecl::field_iterator it = D->field_begin(); |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1057 | const FieldDecl *LastFD = 0; |
| 1058 | bool IsMsStruct = D->hasAttr<MsStructAttr>(); |
Daniel Dunbar | 2ba6744 | 2010-04-21 19:10:49 +0000 | [diff] [blame] | 1059 | for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) { |
| 1060 | const FieldDecl *FD = *it; |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1061 | |
| 1062 | // For non-bit-fields, just check that the LLVM struct offset matches the |
| 1063 | // AST offset. |
| 1064 | if (!FD->isBitField()) { |
Daniel Dunbar | 2ba6744 | 2010-04-21 19:10:49 +0000 | [diff] [blame] | 1065 | unsigned FieldNo = RL->getLLVMFieldNo(FD); |
| 1066 | assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) && |
| 1067 | "Invalid field offset!"); |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1068 | LastFD = FD; |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1069 | continue; |
| 1070 | } |
| 1071 | |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1072 | if (IsMsStruct) { |
| 1073 | // Zero-length bitfields following non-bitfield members are |
| 1074 | // ignored: |
Fariborz Jahanian | fc0fe6e | 2011-05-03 20:21:04 +0000 | [diff] [blame] | 1075 | if (getContext().ZeroBitfieldFollowsNonBitfield(FD, LastFD)) { |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1076 | --i; |
| 1077 | continue; |
| 1078 | } |
| 1079 | LastFD = FD; |
| 1080 | } |
| 1081 | |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1082 | // Ignore unnamed bit-fields. |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1083 | if (!FD->getDeclName()) { |
| 1084 | LastFD = FD; |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1085 | continue; |
Fariborz Jahanian | bcb23a1 | 2011-04-26 23:52:16 +0000 | [diff] [blame] | 1086 | } |
| 1087 | |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1088 | const CGBitFieldInfo &Info = RL->getBitFieldInfo(FD); |
| 1089 | for (unsigned i = 0, e = Info.getNumComponents(); i != e; ++i) { |
| 1090 | const CGBitFieldInfo::AccessInfo &AI = Info.getComponent(i); |
| 1091 | |
| 1092 | // Verify that every component access is within the structure. |
| 1093 | uint64_t FieldOffset = SL->getElementOffsetInBits(AI.FieldIndex); |
John McCall | 8a3c555 | 2011-02-26 08:41:59 +0000 | [diff] [blame] | 1094 | uint64_t AccessBitOffset = FieldOffset + |
Ken Dyck | f76759c | 2011-04-24 10:04:59 +0000 | [diff] [blame] | 1095 | getContext().toBits(AI.FieldByteOffset); |
Daniel Dunbar | 488f55c | 2010-04-22 02:35:46 +0000 | [diff] [blame] | 1096 | assert(AccessBitOffset + AI.AccessWidth <= TypeSizeInBits && |
| 1097 | "Invalid bit-field access (out of range)!"); |
Daniel Dunbar | 2ba6744 | 2010-04-21 19:10:49 +0000 | [diff] [blame] | 1098 | } |
| 1099 | } |
| 1100 | #endif |
Daniel Dunbar | 2ea5183 | 2010-04-19 20:44:47 +0000 | [diff] [blame] | 1101 | |
Daniel Dunbar | 034299e | 2010-03-31 01:09:11 +0000 | [diff] [blame] | 1102 | return RL; |
Anders Carlsson | 307846f | 2009-07-23 03:17:50 +0000 | [diff] [blame] | 1103 | } |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1104 | |
Chris Lattner | 0e62c1c | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1105 | void CGRecordLayout::print(raw_ostream &OS) const { |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1106 | OS << "<CGRecordLayout\n"; |
John McCall | 0217dfc2 | 2011-02-15 06:40:56 +0000 | [diff] [blame] | 1107 | OS << " LLVMType:" << *CompleteObjectType << "\n"; |
| 1108 | if (BaseSubobjectType) |
| 1109 | OS << " NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n"; |
John McCall | 614dbdc | 2010-08-22 21:01:12 +0000 | [diff] [blame] | 1110 | OS << " IsZeroInitializable:" << IsZeroInitializable << "\n"; |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1111 | OS << " BitFields:[\n"; |
Daniel Dunbar | b6f4b05 | 2010-04-22 02:35:36 +0000 | [diff] [blame] | 1112 | |
| 1113 | // Print bit-field infos in declaration order. |
| 1114 | std::vector<std::pair<unsigned, const CGBitFieldInfo*> > BFIs; |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1115 | for (llvm::DenseMap<const FieldDecl*, CGBitFieldInfo>::const_iterator |
| 1116 | it = BitFields.begin(), ie = BitFields.end(); |
| 1117 | it != ie; ++it) { |
Daniel Dunbar | b6f4b05 | 2010-04-22 02:35:36 +0000 | [diff] [blame] | 1118 | const RecordDecl *RD = it->first->getParent(); |
| 1119 | unsigned Index = 0; |
| 1120 | for (RecordDecl::field_iterator |
| 1121 | it2 = RD->field_begin(); *it2 != it->first; ++it2) |
| 1122 | ++Index; |
| 1123 | BFIs.push_back(std::make_pair(Index, &it->second)); |
| 1124 | } |
| 1125 | llvm::array_pod_sort(BFIs.begin(), BFIs.end()); |
| 1126 | for (unsigned i = 0, e = BFIs.size(); i != e; ++i) { |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1127 | OS.indent(4); |
Daniel Dunbar | b6f4b05 | 2010-04-22 02:35:36 +0000 | [diff] [blame] | 1128 | BFIs[i].second->print(OS); |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1129 | OS << "\n"; |
| 1130 | } |
Daniel Dunbar | b6f4b05 | 2010-04-22 02:35:36 +0000 | [diff] [blame] | 1131 | |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1132 | OS << "]>\n"; |
| 1133 | } |
| 1134 | |
| 1135 | void CGRecordLayout::dump() const { |
| 1136 | print(llvm::errs()); |
| 1137 | } |
| 1138 | |
Chris Lattner | 0e62c1c | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1139 | void CGBitFieldInfo::print(raw_ostream &OS) const { |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1140 | OS << "<CGBitFieldInfo"; |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1141 | OS << " Size:" << Size; |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1142 | OS << " IsSigned:" << IsSigned << "\n"; |
| 1143 | |
| 1144 | OS.indent(4 + strlen("<CGBitFieldInfo")); |
| 1145 | OS << " NumComponents:" << getNumComponents(); |
| 1146 | OS << " Components: ["; |
| 1147 | if (getNumComponents()) { |
| 1148 | OS << "\n"; |
| 1149 | for (unsigned i = 0, e = getNumComponents(); i != e; ++i) { |
| 1150 | const AccessInfo &AI = getComponent(i); |
| 1151 | OS.indent(8); |
| 1152 | OS << "<AccessInfo" |
| 1153 | << " FieldIndex:" << AI.FieldIndex |
Ken Dyck | f76759c | 2011-04-24 10:04:59 +0000 | [diff] [blame] | 1154 | << " FieldByteOffset:" << AI.FieldByteOffset.getQuantity() |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1155 | << " FieldBitStart:" << AI.FieldBitStart |
| 1156 | << " AccessWidth:" << AI.AccessWidth << "\n"; |
| 1157 | OS.indent(8 + strlen("<AccessInfo")); |
Ken Dyck | 27337a8 | 2011-04-24 10:13:17 +0000 | [diff] [blame] | 1158 | OS << " AccessAlignment:" << AI.AccessAlignment.getQuantity() |
Daniel Dunbar | b935b93 | 2010-04-13 20:58:55 +0000 | [diff] [blame] | 1159 | << " TargetBitOffset:" << AI.TargetBitOffset |
| 1160 | << " TargetBitWidth:" << AI.TargetBitWidth |
| 1161 | << ">\n"; |
| 1162 | } |
| 1163 | OS.indent(4); |
| 1164 | } |
| 1165 | OS << "]>"; |
Daniel Dunbar | b97bff9 | 2010-04-12 18:14:18 +0000 | [diff] [blame] | 1166 | } |
| 1167 | |
| 1168 | void CGBitFieldInfo::dump() const { |
| 1169 | print(llvm::errs()); |
| 1170 | } |