//===--- CGCall.cpp - Encapsulate calling convention details ----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes wrap the information about a call or function
// definition used to handle ABI compliance.
//
//===----------------------------------------------------------------------===//

#include "CGCall.h"
#include "CGCXXABI.h"
#include "ABIInfo.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/Attributes.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Target/TargetData.h"
#include "llvm/InlineAsm.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace clang;
using namespace CodeGen;

/***/

static unsigned ClangCallConvToLLVMCallConv(CallingConv CC) {
  switch (CC) {
  default: return llvm::CallingConv::C;
  case CC_X86StdCall: return llvm::CallingConv::X86_StdCall;
  case CC_X86FastCall: return llvm::CallingConv::X86_FastCall;
  case CC_X86ThisCall: return llvm::CallingConv::X86_ThisCall;
  case CC_AAPCS: return llvm::CallingConv::ARM_AAPCS;
  case CC_AAPCS_VFP: return llvm::CallingConv::ARM_AAPCS_VFP;
  // TODO: add support for CC_X86Pascal to llvm
  }
}

/// Derives the 'this' type for codegen purposes, i.e. ignoring method
/// qualification.
/// FIXME: address space qualification?
static CanQualType GetThisType(ASTContext &Context, const CXXRecordDecl *RD) {
  QualType RecTy = Context.getTagDeclType(RD)->getCanonicalTypeInternal();
  return Context.getPointerType(CanQualType::CreateUnsafe(RecTy));
}

/// Returns the canonical formal type of the given C++ method.
static CanQual<FunctionProtoType> GetFormalType(const CXXMethodDecl *MD) {
  return MD->getType()->getCanonicalTypeUnqualified()
           .getAs<FunctionProtoType>();
}

/// Returns the "extra-canonicalized" return type, which discards
/// qualifiers on the return type.  Codegen doesn't care about them,
/// and it makes ABI code a little easier to be able to assume that
/// all parameter and return types are top-level unqualified.
static CanQualType GetReturnType(QualType RetTy) {
  return RetTy->getCanonicalTypeUnqualified().getUnqualifiedType();
}
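
// For illustration: given a declaration such as 'const int f();',
// GetReturnType canonicalizes the result to plain 'int', so the ABI code
// below never has to reason about top-level qualifiers on return (or
// parameter) types.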

/// Arrange the argument and result information for a value of the
/// given unprototyped function type.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionType(CanQual<FunctionNoProtoType> FTNP) {
  // When translating an unprototyped function type, always use a
  // variadic type.
  return arrangeFunctionType(FTNP->getResultType().getUnqualifiedType(),
                             ArrayRef<CanQualType>(),
                             FTNP->getExtInfo(),
                             RequiredArgs(0));
}
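
// Rough sketch of the effect: for a K&R-style declaration such as 'int f();'
// (no prototype), the value's signature is arranged as variadic with zero
// required arguments, so every argument supplied at an actual call through
// that type is handled under the variadic rules.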

/// Arrange the argument and result information for a value of the
/// given function type, on top of any implicit parameters already
/// stored.
static const CGFunctionInfo &arrangeFunctionType(CodeGenTypes &CGT,
                                        SmallVectorImpl<CanQualType> &argTypes,
                                             CanQual<FunctionProtoType> FTP) {
  RequiredArgs required = RequiredArgs::forPrototypePlus(FTP, argTypes.size());
  // FIXME: Kill copy.
  for (unsigned i = 0, e = FTP->getNumArgs(); i != e; ++i)
    argTypes.push_back(FTP->getArgType(i));
  CanQualType resultType = FTP->getResultType().getUnqualifiedType();
  return CGT.arrangeFunctionType(resultType, argTypes,
                                 FTP->getExtInfo(), required);
}

/// Arrange the argument and result information for a value of the
/// given function type.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionType(CanQual<FunctionProtoType> FTP) {
  SmallVector<CanQualType, 16> argTypes;
  return ::arrangeFunctionType(*this, argTypes, FTP);
}

static CallingConv getCallingConventionForDecl(const Decl *D) {
  // Set the appropriate calling convention for the Function.
  if (D->hasAttr<StdCallAttr>())
    return CC_X86StdCall;

  if (D->hasAttr<FastCallAttr>())
    return CC_X86FastCall;

  if (D->hasAttr<ThisCallAttr>())
    return CC_X86ThisCall;

  if (D->hasAttr<PascalAttr>())
    return CC_X86Pascal;

  if (PcsAttr *PCS = D->getAttr<PcsAttr>())
    return (PCS->getPCS() == PcsAttr::AAPCS ? CC_AAPCS : CC_AAPCS_VFP);

  return CC_C;
}
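
// Illustrative mapping, assuming an x86 target: a declaration like
//   __attribute__((fastcall)) void f(int);
// yields CC_X86FastCall here, which ClangCallConvToLLVMCallConv above then
// lowers to llvm::CallingConv::X86_FastCall in the generated IR.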

/// Arrange the argument and result information for a call to an
/// unknown C++ non-static member function of the given abstract type.
/// The member function must be an ordinary function, i.e. not a
/// constructor or destructor.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXMethodType(const CXXRecordDecl *RD,
                                   const FunctionProtoType *FTP) {
  SmallVector<CanQualType, 16> argTypes;

  // Add the 'this' pointer.
  argTypes.push_back(GetThisType(Context, RD));

  return ::arrangeFunctionType(*this, argTypes,
              FTP->getCanonicalTypeUnqualified().getAs<FunctionProtoType>());
}

/// Arrange the argument and result information for a declaration or
/// definition of the given C++ non-static member function.  The
/// member function must be an ordinary function, i.e. not a
/// constructor or destructor.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXMethodDeclaration(const CXXMethodDecl *MD) {
  assert(!isa<CXXConstructorDecl>(MD) && "wrong method for constructors!");
  assert(!isa<CXXDestructorDecl>(MD) && "wrong method for destructors!");

  CanQual<FunctionProtoType> prototype = GetFormalType(MD);

  if (MD->isInstance()) {
    // The abstract case is perfectly fine.
    return arrangeCXXMethodType(MD->getParent(), prototype.getTypePtr());
  }

  return arrangeFunctionType(prototype);
}
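
// For example (hypothetical type, before any ABI-specific coercions): an
// instance method 'int S::f(int)' is arranged roughly as 'int (S*, int)',
// because arrangeCXXMethodType prepends the 'this' pointer before the
// formal parameters are appended.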

/// Arrange the argument and result information for a declaration
/// or definition of the given constructor variant.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXConstructorDeclaration(const CXXConstructorDecl *D,
                                               CXXCtorType ctorKind) {
  SmallVector<CanQualType, 16> argTypes;
  argTypes.push_back(GetThisType(Context, D->getParent()));
  CanQualType resultType = Context.VoidTy;

  TheCXXABI.BuildConstructorSignature(D, ctorKind, resultType, argTypes);

  CanQual<FunctionProtoType> FTP = GetFormalType(D);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FTP, argTypes.size());

  // Add the formal parameters.
  for (unsigned i = 0, e = FTP->getNumArgs(); i != e; ++i)
    argTypes.push_back(FTP->getArgType(i));

  return arrangeFunctionType(resultType, argTypes, FTP->getExtInfo(), required);
}

/// Arrange the argument and result information for a declaration,
/// definition, or call to the given destructor variant.  It so
/// happens that all three cases produce the same information.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXDestructor(const CXXDestructorDecl *D,
                                   CXXDtorType dtorKind) {
  SmallVector<CanQualType, 2> argTypes;
  argTypes.push_back(GetThisType(Context, D->getParent()));
  CanQualType resultType = Context.VoidTy;

  TheCXXABI.BuildDestructorSignature(D, dtorKind, resultType, argTypes);

  CanQual<FunctionProtoType> FTP = GetFormalType(D);
  assert(FTP->getNumArgs() == 0 && "dtor with formal parameters");

  return arrangeFunctionType(resultType, argTypes, FTP->getExtInfo(),
                             RequiredArgs::All);
}

/// Arrange the argument and result information for the declaration or
/// definition of the given function.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionDeclaration(const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    if (MD->isInstance())
      return arrangeCXXMethodDeclaration(MD);

  CanQualType FTy = FD->getType()->getCanonicalTypeUnqualified();

  assert(isa<FunctionType>(FTy));

  // When declaring a function without a prototype, always use a
  // non-variadic type.
  if (isa<FunctionNoProtoType>(FTy)) {
    CanQual<FunctionNoProtoType> noProto = FTy.getAs<FunctionNoProtoType>();
    return arrangeFunctionType(noProto->getResultType(),
                               ArrayRef<CanQualType>(),
                               noProto->getExtInfo(),
                               RequiredArgs::All);
  }

  assert(isa<FunctionProtoType>(FTy));
  return arrangeFunctionType(FTy.getAs<FunctionProtoType>());
}

/// Arrange the argument and result information for the declaration or
/// definition of an Objective-C method.
const CGFunctionInfo &
CodeGenTypes::arrangeObjCMethodDeclaration(const ObjCMethodDecl *MD) {
  // It happens that this is the same as a call with no optional
  // arguments, except also using the formal 'self' type.
  return arrangeObjCMessageSendSignature(MD, MD->getSelfDecl()->getType());
}

/// Arrange the argument and result information for the function type
/// through which to perform a send to the given Objective-C method,
/// using the given receiver type.  The receiver type is not always
/// the 'self' type of the method or even an Objective-C pointer type.
/// This is *not* the right method for actually performing such a
/// message send, due to the possibility of optional arguments.
const CGFunctionInfo &
CodeGenTypes::arrangeObjCMessageSendSignature(const ObjCMethodDecl *MD,
                                              QualType receiverType) {
  SmallVector<CanQualType, 16> argTys;
  argTys.push_back(Context.getCanonicalParamType(receiverType));
  argTys.push_back(Context.getCanonicalParamType(Context.getObjCSelType()));
  // FIXME: Kill copy?
  for (ObjCMethodDecl::param_const_iterator i = MD->param_begin(),
         e = MD->param_end(); i != e; ++i) {
    argTys.push_back(Context.getCanonicalParamType((*i)->getType()));
  }

  FunctionType::ExtInfo einfo;
  einfo = einfo.withCallingConv(getCallingConventionForDecl(MD));

  if (getContext().getLangOpts().ObjCAutoRefCount &&
      MD->hasAttr<NSReturnsRetainedAttr>())
    einfo = einfo.withProducesResult(true);

  RequiredArgs required =
    (MD->isVariadic() ? RequiredArgs(argTys.size()) : RequiredArgs::All);

  return arrangeFunctionType(GetReturnType(MD->getResultType()), argTys,
                             einfo, required);
}
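
// Illustrative example (hypothetical method): for '- (void)append:(id)x',
// argTys becomes { receiver, SEL, id } - the receiver type supplied by the
// caller, the implicit selector, then the declared parameters. Were the
// method variadic, only those declared slots would be required and anything
// extra at the call site would follow the variadic rules.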

const CGFunctionInfo &
CodeGenTypes::arrangeGlobalDeclaration(GlobalDecl GD) {
  // FIXME: Do we need to handle ObjCMethodDecl?
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());

  if (const CXXConstructorDecl *CD = dyn_cast<CXXConstructorDecl>(FD))
    return arrangeCXXConstructorDeclaration(CD, GD.getCtorType());

  if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(FD))
    return arrangeCXXDestructor(DD, GD.getDtorType());

  return arrangeFunctionDeclaration(FD);
}

/// Figure out the rules for calling a function with the given formal
/// type using the given arguments.  The arguments are necessary
/// because the function might be unprototyped, in which case it's
/// target-dependent in crazy ways.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionCall(const CallArgList &args,
                                  const FunctionType *fnType) {
  RequiredArgs required = RequiredArgs::All;
  if (const FunctionProtoType *proto = dyn_cast<FunctionProtoType>(fnType)) {
    if (proto->isVariadic())
      required = RequiredArgs(proto->getNumArgs());
  } else if (CGM.getTargetCodeGenInfo()
               .isNoProtoCallVariadic(args, cast<FunctionNoProtoType>(fnType))) {
    required = RequiredArgs(0);
  }

  return arrangeFunctionCall(fnType->getResultType(), args,
                             fnType->getExtInfo(), required);
}
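
// For a variadic prototype such as 'int printf(const char *, ...)', a call
// like printf("%d", x) is arranged with required = RequiredArgs(1): the
// format string is a fixed argument, while x is passed under the variadic
// rules.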

const CGFunctionInfo &
CodeGenTypes::arrangeFunctionCall(QualType resultType,
                                  const CallArgList &args,
                                  const FunctionType::ExtInfo &info,
                                  RequiredArgs required) {
  // FIXME: Kill copy.
  SmallVector<CanQualType, 16> argTypes;
  for (CallArgList::const_iterator i = args.begin(), e = args.end();
       i != e; ++i)
    argTypes.push_back(Context.getCanonicalParamType(i->Ty));
  return arrangeFunctionType(GetReturnType(resultType), argTypes, info,
                             required);
}

const CGFunctionInfo &
CodeGenTypes::arrangeFunctionDeclaration(QualType resultType,
                                         const FunctionArgList &args,
                                         const FunctionType::ExtInfo &info,
                                         bool isVariadic) {
  // FIXME: Kill copy.
  SmallVector<CanQualType, 16> argTypes;
  for (FunctionArgList::const_iterator i = args.begin(), e = args.end();
       i != e; ++i)
    argTypes.push_back(Context.getCanonicalParamType((*i)->getType()));

  RequiredArgs required =
    (isVariadic ? RequiredArgs(args.size()) : RequiredArgs::All);
  return arrangeFunctionType(GetReturnType(resultType), argTypes, info,
                             required);
}

const CGFunctionInfo &CodeGenTypes::arrangeNullaryFunction() {
  return arrangeFunctionType(getContext().VoidTy, ArrayRef<CanQualType>(),
                             FunctionType::ExtInfo(), RequiredArgs::All);
}

/// Arrange the argument and result information for an abstract value
/// of a given function type.  This is the method which all of the
/// above functions ultimately defer to.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionType(CanQualType resultType,
                                  ArrayRef<CanQualType> argTypes,
                                  const FunctionType::ExtInfo &info,
                                  RequiredArgs required) {
#ifndef NDEBUG
  for (ArrayRef<CanQualType>::const_iterator
         I = argTypes.begin(), E = argTypes.end(); I != E; ++I)
    assert(I->isCanonicalAsParam());
#endif

  unsigned CC = ClangCallConvToLLVMCallConv(info.getCC());

  // Lookup or create unique function info.
  llvm::FoldingSetNodeID ID;
  CGFunctionInfo::Profile(ID, info, required, resultType, argTypes);

  void *insertPos = 0;
  CGFunctionInfo *FI = FunctionInfos.FindNodeOrInsertPos(ID, insertPos);
  if (FI)
    return *FI;

  // Construct the function info.  We co-allocate the ArgInfos.
  FI = CGFunctionInfo::create(CC, info, resultType, argTypes, required);
  FunctionInfos.InsertNode(FI, insertPos);

  bool inserted = FunctionsBeingProcessed.insert(FI); (void)inserted;
  assert(inserted && "Recursively being processed?");

  // Compute ABI information.
  getABIInfo().computeInfo(*FI);

  // Loop over all of the computed argument and return value info.  If any of
  // them are direct or extend without a specified coerce type, specify the
  // default now.
  ABIArgInfo &retInfo = FI->getReturnInfo();
  if (retInfo.canHaveCoerceToType() && retInfo.getCoerceToType() == 0)
    retInfo.setCoerceToType(ConvertType(FI->getReturnType()));

  for (CGFunctionInfo::arg_iterator I = FI->arg_begin(), E = FI->arg_end();
       I != E; ++I)
    if (I->info.canHaveCoerceToType() && I->info.getCoerceToType() == 0)
      I->info.setCoerceToType(ConvertType(I->type));

  bool erased = FunctionsBeingProcessed.erase(FI); (void)erased;
  assert(erased && "Not in set?");

  return *FI;
}

CGFunctionInfo *CGFunctionInfo::create(unsigned llvmCC,
                                       const FunctionType::ExtInfo &info,
                                       CanQualType resultType,
                                       ArrayRef<CanQualType> argTypes,
                                       RequiredArgs required) {
  void *buffer = operator new(sizeof(CGFunctionInfo) +
                              sizeof(ArgInfo) * (argTypes.size() + 1));
  CGFunctionInfo *FI = new(buffer) CGFunctionInfo();
  FI->CallingConvention = llvmCC;
  FI->EffectiveCallingConvention = llvmCC;
  FI->ASTCallingConvention = info.getCC();
  FI->NoReturn = info.getNoReturn();
  FI->ReturnsRetained = info.getProducesResult();
  FI->Required = required;
  FI->HasRegParm = info.getHasRegParm();
  FI->RegParm = info.getRegParm();
  FI->NumArgs = argTypes.size();
  FI->getArgsBuffer()[0].type = resultType;
  for (unsigned i = 0, e = argTypes.size(); i != e; ++i)
    FI->getArgsBuffer()[i + 1].type = argTypes[i];
  return FI;
}
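
// Layout note: the ArgInfo array is co-allocated directly after the
// CGFunctionInfo object. Slot 0 of getArgsBuffer() holds the return
// type/info and slots 1..NumArgs hold the parameters, which is why the
// buffer above is sized as argTypes.size() + 1.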

/***/

void CodeGenTypes::GetExpandedTypes(QualType type,
                     SmallVectorImpl<llvm::Type*> &expandedTypes) {
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(type)) {
    uint64_t NumElts = AT->getSize().getZExtValue();
    for (uint64_t Elt = 0; Elt < NumElts; ++Elt)
      GetExpandedTypes(AT->getElementType(), expandedTypes);
  } else if (const RecordType *RT = type->getAs<RecordType>()) {
    const RecordDecl *RD = RT->getDecl();
    assert(!RD->hasFlexibleArrayMember() &&
           "Cannot expand structure with flexible array.");
    if (RD->isUnion()) {
      // Unions can be here only in degenerate cases - all the fields are the
      // same after flattening. Thus we have to use the "largest" field.
      const FieldDecl *LargestFD = 0;
      CharUnits UnionSize = CharUnits::Zero();

      for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end();
           i != e; ++i) {
        const FieldDecl *FD = *i;
        assert(!FD->isBitField() &&
               "Cannot expand structure with bit-field members.");
        CharUnits FieldSize = getContext().getTypeSizeInChars(FD->getType());
        if (UnionSize < FieldSize) {
          UnionSize = FieldSize;
          LargestFD = FD;
        }
      }
      if (LargestFD)
        GetExpandedTypes(LargestFD->getType(), expandedTypes);
    } else {
      for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end();
           i != e; ++i) {
        const FieldDecl *FD = *i;
        assert(!FD->isBitField() &&
               "Cannot expand structure with bit-field members.");
        GetExpandedTypes(FD->getType(), expandedTypes);
      }
    }
  } else if (const ComplexType *CT = type->getAs<ComplexType>()) {
    llvm::Type *EltTy = ConvertType(CT->getElementType());
    expandedTypes.push_back(EltTy);
    expandedTypes.push_back(EltTy);
  } else
    expandedTypes.push_back(ConvertType(type));
}
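
// Worked example (hypothetical C type): for 'struct P { int a; float v[2]; }'
// the expansion is { i32, float, float } - the array contributes one entry per
// element and each field is recursed into. A '_Complex double' expands to two
// doubles (the real and imaginary parts).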

llvm::Function::arg_iterator
CodeGenFunction::ExpandTypeFromArgs(QualType Ty, LValue LV,
                                    llvm::Function::arg_iterator AI) {
  assert(LV.isSimple() &&
         "Unexpected non-simple lvalue during struct expansion.");
  llvm::Value *Addr = LV.getAddress();

  if (const ConstantArrayType *AT = getContext().getAsConstantArrayType(Ty)) {
    unsigned NumElts = AT->getSize().getZExtValue();
    QualType EltTy = AT->getElementType();
    for (unsigned Elt = 0; Elt < NumElts; ++Elt) {
      llvm::Value *EltAddr = Builder.CreateConstGEP2_32(Addr, 0, Elt);
      LValue LV = MakeAddrLValue(EltAddr, EltTy);
      AI = ExpandTypeFromArgs(EltTy, LV, AI);
    }
  } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
    RecordDecl *RD = RT->getDecl();
    if (RD->isUnion()) {
      // Unions can be here only in degenerate cases - all the fields are the
      // same after flattening. Thus we have to use the "largest" field.
      const FieldDecl *LargestFD = 0;
      CharUnits UnionSize = CharUnits::Zero();

      for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end();
           i != e; ++i) {
        const FieldDecl *FD = *i;
        assert(!FD->isBitField() &&
               "Cannot expand structure with bit-field members.");
        CharUnits FieldSize = getContext().getTypeSizeInChars(FD->getType());
        if (UnionSize < FieldSize) {
          UnionSize = FieldSize;
          LargestFD = FD;
        }
      }
      if (LargestFD) {
        // FIXME: What are the right qualifiers here?
        LValue LV = EmitLValueForField(Addr, LargestFD, 0);
        AI = ExpandTypeFromArgs(LargestFD->getType(), LV, AI);
      }
    } else {
      for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end();
           i != e; ++i) {
        FieldDecl *FD = *i;
        QualType FT = FD->getType();

        // FIXME: What are the right qualifiers here?
        LValue LV = EmitLValueForField(Addr, FD, 0);
        AI = ExpandTypeFromArgs(FT, LV, AI);
      }
    }
  } else if (const ComplexType *CT = Ty->getAs<ComplexType>()) {
    QualType EltTy = CT->getElementType();
    llvm::Value *RealAddr = Builder.CreateStructGEP(Addr, 0, "real");
    EmitStoreThroughLValue(RValue::get(AI++), MakeAddrLValue(RealAddr, EltTy));
    llvm::Value *ImagAddr = Builder.CreateStructGEP(Addr, 1, "imag");
    EmitStoreThroughLValue(RValue::get(AI++), MakeAddrLValue(ImagAddr, EltTy));
  } else {
    EmitStoreThroughLValue(RValue::get(AI), LV);
    ++AI;
  }

  return AI;
}

/// EnterStructPointerForCoercedAccess - Given a struct pointer that we are
/// accessing some number of bytes out of, try to gep into the struct to get
/// at its inner goodness.  Dive as deep as possible without entering an
/// element with an in-memory size smaller than DstSize.
static llvm::Value *
EnterStructPointerForCoercedAccess(llvm::Value *SrcPtr,
                                   llvm::StructType *SrcSTy,
                                   uint64_t DstSize, CodeGenFunction &CGF) {
  // We can't dive into a zero-element struct.
  if (SrcSTy->getNumElements() == 0) return SrcPtr;

  llvm::Type *FirstElt = SrcSTy->getElementType(0);

  // If the first elt is at least as large as what we're looking for, or if the
  // first element is the same size as the whole struct, we can enter it.
  uint64_t FirstEltSize =
    CGF.CGM.getTargetData().getTypeAllocSize(FirstElt);
  if (FirstEltSize < DstSize &&
      FirstEltSize < CGF.CGM.getTargetData().getTypeAllocSize(SrcSTy))
    return SrcPtr;

  // GEP into the first element.
  SrcPtr = CGF.Builder.CreateConstGEP2_32(SrcPtr, 0, 0, "coerce.dive");

  // If the first element is a struct, recurse.
  llvm::Type *SrcTy =
    cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
  if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy))
    return EnterStructPointerForCoercedAccess(SrcPtr, SrcSTy, DstSize, CGF);

  return SrcPtr;
}
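
// Example of the dive (illustrative IR types): given SrcPtr of type
// '{ { i32 } }*' and DstSize == 4, the first element is entered twice and the
// result is a "coerce.dive" GEP to the innermost i32, since no element along
// the way is smaller than the 4 bytes being accessed.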

/// CoerceIntOrPtrToIntOrPtr - Convert a value Val to the specific Ty where both
/// are either integers or pointers.  This does a truncation of the value if it
/// is too large or a zero extension if it is too small.
static llvm::Value *CoerceIntOrPtrToIntOrPtr(llvm::Value *Val,
                                             llvm::Type *Ty,
                                             CodeGenFunction &CGF) {
  if (Val->getType() == Ty)
    return Val;

  if (isa<llvm::PointerType>(Val->getType())) {
    // If this is Pointer->Pointer avoid conversion to and from int.
    if (isa<llvm::PointerType>(Ty))
      return CGF.Builder.CreateBitCast(Val, Ty, "coerce.val");

    // Convert the pointer to an integer so we can play with its width.
    Val = CGF.Builder.CreatePtrToInt(Val, CGF.IntPtrTy, "coerce.val.pi");
  }

  llvm::Type *DestIntTy = Ty;
  if (isa<llvm::PointerType>(DestIntTy))
    DestIntTy = CGF.IntPtrTy;

  if (Val->getType() != DestIntTy)
    Val = CGF.Builder.CreateIntCast(Val, DestIntTy, false, "coerce.val.ii");

  if (isa<llvm::PointerType>(Ty))
    Val = CGF.Builder.CreateIntToPtr(Val, Ty, "coerce.val.ip");
  return Val;
}
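
// Example (assuming a 64-bit target): coercing an i8* value to i32 first
// emits ptrtoint to the i64 IntPtrTy ("coerce.val.pi") and then truncates to
// i32 ("coerce.val.ii"); going the other way, an i32 is zero-extended to i64
// and then converted with inttoptr ("coerce.val.ip").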


/// CreateCoercedLoad - Create a load from \arg SrcPtr interpreted as
/// a pointer to an object of type \arg Ty.
///
/// This safely handles the case when the src type is smaller than the
/// destination type; in this situation the values of bits which are not
/// present in the src are undefined.
static llvm::Value *CreateCoercedLoad(llvm::Value *SrcPtr,
                                      llvm::Type *Ty,
                                      CodeGenFunction &CGF) {
  llvm::Type *SrcTy =
    cast<llvm::PointerType>(SrcPtr->getType())->getElementType();

  // If SrcTy and Ty are the same, just do a load.
  if (SrcTy == Ty)
    return CGF.Builder.CreateLoad(SrcPtr);

  uint64_t DstSize = CGF.CGM.getTargetData().getTypeAllocSize(Ty);

  if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy)) {
    SrcPtr = EnterStructPointerForCoercedAccess(SrcPtr, SrcSTy, DstSize, CGF);
    SrcTy = cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
  }

  uint64_t SrcSize = CGF.CGM.getTargetData().getTypeAllocSize(SrcTy);

  // If the source and destination are integer or pointer types, just do an
  // extension or truncation to the desired type.
  if ((isa<llvm::IntegerType>(Ty) || isa<llvm::PointerType>(Ty)) &&
      (isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy))) {
    llvm::LoadInst *Load = CGF.Builder.CreateLoad(SrcPtr);
    return CoerceIntOrPtrToIntOrPtr(Load, Ty, CGF);
  }

  // If load is legal, just bitcast the src pointer.
  if (SrcSize >= DstSize) {
    // Generally SrcSize is never greater than DstSize, since this means we are
    // losing bits. However, this can happen in cases where the structure has
    // additional padding, for example due to a user specified alignment.
    //
    // FIXME: Assert that we aren't truncating non-padding bits when we have
    // access to that information.
    llvm::Value *Casted =
      CGF.Builder.CreateBitCast(SrcPtr, llvm::PointerType::getUnqual(Ty));
    llvm::LoadInst *Load = CGF.Builder.CreateLoad(Casted);
    // FIXME: Use better alignment / avoid requiring aligned load.
    Load->setAlignment(1);
    return Load;
  }

  // Otherwise do coercion through memory. This is stupid, but
  // simple.
  llvm::Value *Tmp = CGF.CreateTempAlloca(Ty);
  llvm::Value *Casted =
    CGF.Builder.CreateBitCast(Tmp, llvm::PointerType::getUnqual(SrcTy));
  llvm::StoreInst *Store =
    CGF.Builder.CreateStore(CGF.Builder.CreateLoad(SrcPtr), Casted);
  // FIXME: Use better alignment / avoid requiring aligned store.
  Store->setAlignment(1);
  return CGF.Builder.CreateLoad(Tmp);
}
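
// Example (illustrative types): loading a '{ i32, i32 }' object as an i64
// takes the SrcSize >= DstSize path above - the source pointer is bitcast to
// i64* and loaded with alignment 1, so no assumption is made that the
// original object is 8-byte aligned.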

// Function to store a first-class aggregate into memory.  We prefer to
// store the elements rather than the aggregate to be more friendly to
// fast-isel.
// FIXME: Do we need to recurse here?
static void BuildAggStore(CodeGenFunction &CGF, llvm::Value *Val,
                          llvm::Value *DestPtr, bool DestIsVolatile,
                          bool LowAlignment) {
  // Prefer scalar stores to first-class aggregate stores.
  if (llvm::StructType *STy =
        dyn_cast<llvm::StructType>(Val->getType())) {
    for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
      llvm::Value *EltPtr = CGF.Builder.CreateConstGEP2_32(DestPtr, 0, i);
      llvm::Value *Elt = CGF.Builder.CreateExtractValue(Val, i);
      llvm::StoreInst *SI = CGF.Builder.CreateStore(Elt, EltPtr,
                                                    DestIsVolatile);
      if (LowAlignment)
        SI->setAlignment(1);
    }
  } else {
    llvm::StoreInst *SI = CGF.Builder.CreateStore(Val, DestPtr, DestIsVolatile);
    if (LowAlignment)
      SI->setAlignment(1);
  }
}

/// CreateCoercedStore - Create a store to \arg DstPtr from \arg Src,
/// where the source and destination may have different types.
///
/// This safely handles the case when the src type is larger than the
/// destination type; the upper bits of the src will be lost.
static void CreateCoercedStore(llvm::Value *Src,
                               llvm::Value *DstPtr,
                               bool DstIsVolatile,
                               CodeGenFunction &CGF) {
  llvm::Type *SrcTy = Src->getType();
  llvm::Type *DstTy =
    cast<llvm::PointerType>(DstPtr->getType())->getElementType();
  if (SrcTy == DstTy) {
    CGF.Builder.CreateStore(Src, DstPtr, DstIsVolatile);
    return;
  }

  uint64_t SrcSize = CGF.CGM.getTargetData().getTypeAllocSize(SrcTy);

  if (llvm::StructType *DstSTy = dyn_cast<llvm::StructType>(DstTy)) {
    DstPtr = EnterStructPointerForCoercedAccess(DstPtr, DstSTy, SrcSize, CGF);
    DstTy = cast<llvm::PointerType>(DstPtr->getType())->getElementType();
  }

  // If the source and destination are integer or pointer types, just do an
  // extension or truncation to the desired type.
  if ((isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy)) &&
      (isa<llvm::IntegerType>(DstTy) || isa<llvm::PointerType>(DstTy))) {
    Src = CoerceIntOrPtrToIntOrPtr(Src, DstTy, CGF);
    CGF.Builder.CreateStore(Src, DstPtr, DstIsVolatile);
    return;
  }

  uint64_t DstSize = CGF.CGM.getTargetData().getTypeAllocSize(DstTy);

  // If store is legal, just bitcast the src pointer.
  if (SrcSize <= DstSize) {
    llvm::Value *Casted =
      CGF.Builder.CreateBitCast(DstPtr, llvm::PointerType::getUnqual(SrcTy));
    // FIXME: Use better alignment / avoid requiring aligned store.
    BuildAggStore(CGF, Src, Casted, DstIsVolatile, true);
  } else {
    // Otherwise do coercion through memory. This is stupid, but
    // simple.

    // Generally SrcSize is never greater than DstSize, since this means we are
    // losing bits. However, this can happen in cases where the structure has
    // additional padding, for example due to a user specified alignment.
    //
    // FIXME: Assert that we aren't truncating non-padding bits when we have
    // access to that information.
    llvm::Value *Tmp = CGF.CreateTempAlloca(SrcTy);
    CGF.Builder.CreateStore(Src, Tmp);
    llvm::Value *Casted =
      CGF.Builder.CreateBitCast(Tmp, llvm::PointerType::getUnqual(DstTy));
    llvm::LoadInst *Load = CGF.Builder.CreateLoad(Casted);
    // FIXME: Use better alignment / avoid requiring aligned load.
    Load->setAlignment(1);
    CGF.Builder.CreateStore(Load, DstPtr, DstIsVolatile);
  }
}

/***/

bool CodeGenModule::ReturnTypeUsesSRet(const CGFunctionInfo &FI) {
  return FI.getReturnInfo().isIndirect();
}

bool CodeGenModule::ReturnTypeUsesFPRet(QualType ResultType) {
  if (const BuiltinType *BT = ResultType->getAs<BuiltinType>()) {
    switch (BT->getKind()) {
    default:
      return false;
    case BuiltinType::Float:
      return getContext().getTargetInfo().useObjCFPRetForRealType(TargetInfo::Float);
    case BuiltinType::Double:
      return getContext().getTargetInfo().useObjCFPRetForRealType(TargetInfo::Double);
    case BuiltinType::LongDouble:
      return getContext().getTargetInfo().useObjCFPRetForRealType(
        TargetInfo::LongDouble);
    }
  }

  return false;
}

bool CodeGenModule::ReturnTypeUsesFP2Ret(QualType ResultType) {
  if (const ComplexType *CT = ResultType->getAs<ComplexType>()) {
    if (const BuiltinType *BT = CT->getElementType()->getAs<BuiltinType>()) {
      if (BT->getKind() == BuiltinType::LongDouble)
        return getContext().getTargetInfo().useObjCFP2RetForComplexLongDouble();
    }
  }

  return false;
}

llvm::FunctionType *CodeGenTypes::GetFunctionType(GlobalDecl GD) {
  const CGFunctionInfo &FI = arrangeGlobalDeclaration(GD);
  return GetFunctionType(FI);
}

llvm::FunctionType *
CodeGenTypes::GetFunctionType(const CGFunctionInfo &FI) {

  bool Inserted = FunctionsBeingProcessed.insert(&FI); (void)Inserted;
  assert(Inserted && "Recursively being processed?");

  SmallVector<llvm::Type*, 8> argTypes;
  llvm::Type *resultType = 0;

  const ABIArgInfo &retAI = FI.getReturnInfo();
  switch (retAI.getKind()) {
  case ABIArgInfo::Expand:
    llvm_unreachable("Invalid ABI kind for return argument");

  case ABIArgInfo::Extend:
  case ABIArgInfo::Direct:
    resultType = retAI.getCoerceToType();
    break;

  case ABIArgInfo::Indirect: {
    assert(!retAI.getIndirectAlign() && "Align unused on indirect return.");
    resultType = llvm::Type::getVoidTy(getLLVMContext());

    QualType ret = FI.getReturnType();
    llvm::Type *ty = ConvertType(ret);
    unsigned addressSpace = Context.getTargetAddressSpace(ret);
    argTypes.push_back(llvm::PointerType::get(ty, addressSpace));
    break;
  }

  case ABIArgInfo::Ignore:
    resultType = llvm::Type::getVoidTy(getLLVMContext());
    break;
  }

  for (CGFunctionInfo::const_arg_iterator it = FI.arg_begin(),
         ie = FI.arg_end(); it != ie; ++it) {
    const ABIArgInfo &argAI = it->info;

    switch (argAI.getKind()) {
    case ABIArgInfo::Ignore:
      break;

    case ABIArgInfo::Indirect: {
      // indirect arguments are always on the stack, which is addr space #0.
      llvm::Type *LTy = ConvertTypeForMem(it->type);
      argTypes.push_back(LTy->getPointerTo());
      break;
| 827 | } |
| 828 | |
| 829 | case ABIArgInfo::Extend: |
Chris Lattner | 1ed7267 | 2010-07-29 06:44:09 +0000 | [diff] [blame] | 830 | case ABIArgInfo::Direct: { |
Akira Hatanaka | f0cc208 | 2012-01-07 00:25:33 +0000 | [diff] [blame] | 831 | // Insert a padding type to ensure proper alignment. |
| 832 | if (llvm::Type *PaddingType = argAI.getPaddingType()) |
| 833 | argTypes.push_back(PaddingType); |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 834 |       // If the coerce-to type is a first-class aggregate, flatten it. Either
 | 835 |       // way is semantically identical, but fast-isel and the optimizer
 | 836 |       // generally like scalar values better than FCAs.
Chris Lattner | 9cbe4f0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 837 | llvm::Type *argType = argAI.getCoerceToType(); |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 838 | if (llvm::StructType *st = dyn_cast<llvm::StructType>(argType)) { |
John McCall | 42e0611 | 2011-05-15 02:19:42 +0000 | [diff] [blame] | 839 | for (unsigned i = 0, e = st->getNumElements(); i != e; ++i) |
| 840 | argTypes.push_back(st->getElementType(i)); |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 841 | } else { |
John McCall | 42e0611 | 2011-05-15 02:19:42 +0000 | [diff] [blame] | 842 | argTypes.push_back(argType); |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 843 | } |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 844 | break; |
Chris Lattner | 1ed7267 | 2010-07-29 06:44:09 +0000 | [diff] [blame] | 845 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 846 | |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 847 | case ABIArgInfo::Expand: |
Chris Lattner | 9cbe4f0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 848 | GetExpandedTypes(it->type, argTypes); |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 849 | break; |
| 850 | } |
Daniel Dunbar | 45c25ba | 2008-09-10 04:01:49 +0000 | [diff] [blame] | 851 | } |
| 852 | |
Chris Lattner | 71305cc | 2011-07-15 05:16:14 +0000 | [diff] [blame] | 853 | bool Erased = FunctionsBeingProcessed.erase(&FI); (void)Erased; |
| 854 | assert(Erased && "Not in set?"); |
| 855 | |
John McCall | de5d3c7 | 2012-02-17 03:33:10 +0000 | [diff] [blame] | 856 | return llvm::FunctionType::get(resultType, argTypes, FI.isVariadic()); |
Daniel Dunbar | 3913f18 | 2008-09-09 23:48:28 +0000 | [diff] [blame] | 857 | } |
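
// Illustrative sketch (not part of this file): for a return type that the ABI
// classifies Indirect, e.g.
//
//   struct Big { int a[8]; };
//   struct Big make(void);
//
// the switch above makes the visible result void and prepends a pointer to the
// return slot, so the LLVM type is roughly "void (%struct.Big*)" on a typical
// target.  Direct/Extend returns instead use the coerce-to type as the LLVM
// return type, and Expand is never legal for results.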
| 858 | |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 859 | llvm::Type *CodeGenTypes::GetFunctionTypeForVTable(GlobalDecl GD) { |
John McCall | 4c40d98 | 2010-08-31 07:33:07 +0000 | [diff] [blame] | 860 | const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl()); |
Anders Carlsson | ecf282b | 2009-11-24 05:08:52 +0000 | [diff] [blame] | 861 | const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>(); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 862 | |
Chris Lattner | f742eb0 | 2011-07-10 00:18:59 +0000 | [diff] [blame] | 863 | if (!isFuncTypeConvertible(FPT)) |
| 864 | return llvm::StructType::get(getLLVMContext()); |
| 865 | |
| 866 | const CGFunctionInfo *Info; |
| 867 | if (isa<CXXDestructorDecl>(MD)) |
John McCall | de5d3c7 | 2012-02-17 03:33:10 +0000 | [diff] [blame] | 868 | Info = &arrangeCXXDestructor(cast<CXXDestructorDecl>(MD), GD.getDtorType()); |
Chris Lattner | f742eb0 | 2011-07-10 00:18:59 +0000 | [diff] [blame] | 869 | else |
John McCall | de5d3c7 | 2012-02-17 03:33:10 +0000 | [diff] [blame] | 870 | Info = &arrangeCXXMethodDeclaration(MD); |
| 871 | return GetFunctionType(*Info); |
Anders Carlsson | ecf282b | 2009-11-24 05:08:52 +0000 | [diff] [blame] | 872 | } |
| 873 | |
Daniel Dunbar | a0a99e0 | 2009-02-02 23:43:58 +0000 | [diff] [blame] | 874 | void CodeGenModule::ConstructAttributeList(const CGFunctionInfo &FI, |
Daniel Dunbar | 88b5396 | 2009-02-02 22:03:45 +0000 | [diff] [blame] | 875 | const Decl *TargetDecl, |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 876 | AttributeListType &PAL, |
Daniel Dunbar | ca6408c | 2009-09-12 00:59:20 +0000 | [diff] [blame] | 877 | unsigned &CallingConv) { |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 878 | llvm::Attributes FuncAttrs; |
| 879 | llvm::Attributes RetAttrs; |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 880 | |
Daniel Dunbar | ca6408c | 2009-09-12 00:59:20 +0000 | [diff] [blame] | 881 | CallingConv = FI.getEffectiveCallingConvention(); |
| 882 | |
John McCall | 04a67a6 | 2010-02-05 21:31:56 +0000 | [diff] [blame] | 883 | if (FI.isNoReturn()) |
| 884 | FuncAttrs |= llvm::Attribute::NoReturn; |
| 885 | |
Anton Korobeynikov | 1102f42 | 2009-04-04 00:49:24 +0000 | [diff] [blame] | 886 | // FIXME: handle sseregparm someday... |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 887 | if (TargetDecl) { |
Rafael Espindola | 6700415 | 2011-10-12 19:51:18 +0000 | [diff] [blame] | 888 | if (TargetDecl->hasAttr<ReturnsTwiceAttr>()) |
| 889 | FuncAttrs |= llvm::Attribute::ReturnsTwice; |
Argyrios Kyrtzidis | 40b598e | 2009-06-30 02:34:44 +0000 | [diff] [blame] | 890 | if (TargetDecl->hasAttr<NoThrowAttr>()) |
Devang Patel | 761d7f7 | 2008-09-25 21:02:23 +0000 | [diff] [blame] | 891 | FuncAttrs |= llvm::Attribute::NoUnwind; |
John McCall | 9c0c1f3 | 2010-07-08 06:48:12 +0000 | [diff] [blame] | 892 | else if (const FunctionDecl *Fn = dyn_cast<FunctionDecl>(TargetDecl)) { |
| 893 | const FunctionProtoType *FPT = Fn->getType()->getAs<FunctionProtoType>(); |
Sebastian Redl | 8026f6d | 2011-03-13 17:09:40 +0000 | [diff] [blame] | 894 | if (FPT && FPT->isNothrow(getContext())) |
John McCall | 9c0c1f3 | 2010-07-08 06:48:12 +0000 | [diff] [blame] | 895 | FuncAttrs |= llvm::Attribute::NoUnwind; |
| 896 | } |
| 897 | |
Argyrios Kyrtzidis | 40b598e | 2009-06-30 02:34:44 +0000 | [diff] [blame] | 898 | if (TargetDecl->hasAttr<NoReturnAttr>()) |
Devang Patel | 761d7f7 | 2008-09-25 21:02:23 +0000 | [diff] [blame] | 899 | FuncAttrs |= llvm::Attribute::NoReturn; |
Eric Christopher | 041087c | 2011-08-15 22:38:22 +0000 | [diff] [blame] | 900 | |
Eric Christopher | 041087c | 2011-08-15 22:38:22 +0000 | [diff] [blame] | 904 | // 'const' and 'pure' attribute functions are also nounwind. |
| 905 | if (TargetDecl->hasAttr<ConstAttr>()) { |
Anders Carlsson | 232eb7d | 2008-10-05 23:32:53 +0000 | [diff] [blame] | 906 | FuncAttrs |= llvm::Attribute::ReadNone; |
Eric Christopher | 041087c | 2011-08-15 22:38:22 +0000 | [diff] [blame] | 907 | FuncAttrs |= llvm::Attribute::NoUnwind; |
| 908 | } else if (TargetDecl->hasAttr<PureAttr>()) { |
Daniel Dunbar | 64c2e07 | 2009-04-10 22:14:52 +0000 | [diff] [blame] | 909 | FuncAttrs |= llvm::Attribute::ReadOnly; |
Eric Christopher | 041087c | 2011-08-15 22:38:22 +0000 | [diff] [blame] | 910 | FuncAttrs |= llvm::Attribute::NoUnwind; |
| 911 | } |
Ryan Flynn | 76168e2 | 2009-08-09 20:07:29 +0000 | [diff] [blame] | 912 | if (TargetDecl->hasAttr<MallocAttr>()) |
| 913 | RetAttrs |= llvm::Attribute::NoAlias; |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 914 | } |
| 915 | |
Chandler Carruth | 2811ccf | 2009-11-12 17:24:48 +0000 | [diff] [blame] | 916 | if (CodeGenOpts.OptimizeSize) |
Daniel Dunbar | 7ab1c3e | 2009-10-27 19:48:08 +0000 | [diff] [blame] | 917 | FuncAttrs |= llvm::Attribute::OptimizeForSize; |
Chandler Carruth | 2811ccf | 2009-11-12 17:24:48 +0000 | [diff] [blame] | 918 | if (CodeGenOpts.DisableRedZone) |
Devang Patel | 24095da | 2009-06-04 23:32:02 +0000 | [diff] [blame] | 919 | FuncAttrs |= llvm::Attribute::NoRedZone; |
Chandler Carruth | 2811ccf | 2009-11-12 17:24:48 +0000 | [diff] [blame] | 920 | if (CodeGenOpts.NoImplicitFloat) |
Devang Patel | acebb39 | 2009-06-05 22:05:48 +0000 | [diff] [blame] | 921 | FuncAttrs |= llvm::Attribute::NoImplicitFloat; |
Devang Patel | 24095da | 2009-06-04 23:32:02 +0000 | [diff] [blame] | 922 | |
Daniel Dunbar | a0a99e0 | 2009-02-02 23:43:58 +0000 | [diff] [blame] | 923 | QualType RetTy = FI.getReturnType(); |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 924 | unsigned Index = 1; |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 925 | const ABIArgInfo &RetAI = FI.getReturnInfo(); |
Daniel Dunbar | 45c25ba | 2008-09-10 04:01:49 +0000 | [diff] [blame] | 926 | switch (RetAI.getKind()) { |
Anton Korobeynikov | cc6fa88 | 2009-06-06 09:36:29 +0000 | [diff] [blame] | 927 | case ABIArgInfo::Extend: |
Chris Lattner | 2eb9cdd | 2010-07-28 23:46:15 +0000 | [diff] [blame] | 928 | if (RetTy->hasSignedIntegerRepresentation()) |
Anton Korobeynikov | cc6fa88 | 2009-06-06 09:36:29 +0000 | [diff] [blame] | 929 | RetAttrs |= llvm::Attribute::SExt; |
Chris Lattner | 2eb9cdd | 2010-07-28 23:46:15 +0000 | [diff] [blame] | 930 | else if (RetTy->hasUnsignedIntegerRepresentation()) |
Anton Korobeynikov | cc6fa88 | 2009-06-06 09:36:29 +0000 | [diff] [blame] | 931 | RetAttrs |= llvm::Attribute::ZExt; |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 932 | break; |
Daniel Dunbar | 46327aa | 2009-02-03 06:17:37 +0000 | [diff] [blame] | 933 | case ABIArgInfo::Direct: |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 934 | case ABIArgInfo::Ignore: |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 935 | break; |
| 936 | |
Daniel Dunbar | 11e383a | 2009-02-05 08:00:50 +0000 | [diff] [blame] | 937 | case ABIArgInfo::Indirect: |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 938 | PAL.push_back(llvm::AttributeWithIndex::get(Index, |
Chris Lattner | fb97cf2 | 2010-04-20 05:44:43 +0000 | [diff] [blame] | 939 | llvm::Attribute::StructRet)); |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 940 | ++Index; |
Daniel Dunbar | 0ac86f0 | 2009-03-18 19:51:01 +0000 | [diff] [blame] | 941 | // sret disables readnone and readonly |
| 942 | FuncAttrs &= ~(llvm::Attribute::ReadOnly | |
| 943 | llvm::Attribute::ReadNone); |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 944 | break; |
| 945 | |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 946 | case ABIArgInfo::Expand: |
David Blaikie | b219cfc | 2011-09-23 05:06:16 +0000 | [diff] [blame] | 947 | llvm_unreachable("Invalid ABI kind for return argument"); |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 948 | } |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 949 | |
Devang Patel | a2c6912 | 2008-09-26 22:53:57 +0000 | [diff] [blame] | 950 | if (RetAttrs) |
| 951 | PAL.push_back(llvm::AttributeWithIndex::get(0, RetAttrs)); |
Anton Korobeynikov | 1102f42 | 2009-04-04 00:49:24 +0000 | [diff] [blame] | 952 | |
Daniel Dunbar | 17d3fea | 2011-02-09 17:54:19 +0000 | [diff] [blame] | 953 | // FIXME: RegParm should be reduced in case of global register variable. |
Eli Friedman | a49218e | 2011-04-09 08:18:08 +0000 | [diff] [blame] | 954 | signed RegParm; |
| 955 | if (FI.getHasRegParm()) |
| 956 | RegParm = FI.getRegParm(); |
| 957 | else |
Daniel Dunbar | 17d3fea | 2011-02-09 17:54:19 +0000 | [diff] [blame] | 958 | RegParm = CodeGenOpts.NumRegisterParameters; |
Anton Korobeynikov | 1102f42 | 2009-04-04 00:49:24 +0000 | [diff] [blame] | 959 | |
Douglas Gregor | bcfd1f5 | 2011-09-02 00:18:52 +0000 | [diff] [blame] | 960 | unsigned PointerWidth = getContext().getTargetInfo().getPointerWidth(0); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 961 | for (CGFunctionInfo::const_arg_iterator it = FI.arg_begin(), |
Daniel Dunbar | 88c2fa9 | 2009-02-03 05:31:23 +0000 | [diff] [blame] | 962 | ie = FI.arg_end(); it != ie; ++it) { |
| 963 | QualType ParamType = it->type; |
| 964 | const ABIArgInfo &AI = it->info; |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 965 | llvm::Attributes Attrs; |
Anton Korobeynikov | 1102f42 | 2009-04-04 00:49:24 +0000 | [diff] [blame] | 966 | |
John McCall | d8e10d2 | 2010-03-27 00:47:27 +0000 | [diff] [blame] | 967 |     // 'restrict' -> 'noalias' is done in EmitFunctionProlog when we
 | 968 |     // have the corresponding parameter variable. It doesn't make
Daniel Dunbar | 7f6890e | 2011-02-10 18:10:07 +0000 | [diff] [blame] | 969 |     // sense to do it here because the lowered arguments no longer map 1:1 onto the declared parameters.
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 970 | switch (AI.getKind()) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 971 | case ABIArgInfo::Extend: |
Douglas Gregor | 575a1c9 | 2011-05-20 16:38:50 +0000 | [diff] [blame] | 972 | if (ParamType->isSignedIntegerOrEnumerationType()) |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 973 | Attrs |= llvm::Attribute::SExt; |
Douglas Gregor | 575a1c9 | 2011-05-20 16:38:50 +0000 | [diff] [blame] | 974 | else if (ParamType->isUnsignedIntegerOrEnumerationType()) |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 975 | Attrs |= llvm::Attribute::ZExt; |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 976 | // FALL THROUGH |
| 977 | case ABIArgInfo::Direct: |
| 978 | if (RegParm > 0 && |
Rafael Espindola | 2871020 | 2011-11-27 18:35:39 +0000 | [diff] [blame] | 979 | (ParamType->isIntegerType() || ParamType->isPointerType() || |
| 980 | ParamType->isReferenceType())) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 981 | RegParm -= |
| 982 | (Context.getTypeSize(ParamType) + PointerWidth - 1) / PointerWidth; |
| 983 | if (RegParm >= 0) |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 984 | Attrs |= llvm::Attribute::InReg; |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 985 | } |
| 986 | // FIXME: handle sseregparm someday... |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 987 | |
Akira Hatanaka | f0cc208 | 2012-01-07 00:25:33 +0000 | [diff] [blame] | 988 | // Increment Index if there is padding. |
| 989 | Index += (AI.getPaddingType() != 0); |
| 990 | |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 991 | if (llvm::StructType *STy = |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 992 | dyn_cast<llvm::StructType>(AI.getCoerceToType())) |
| 993 | Index += STy->getNumElements()-1; // 1 will be added below. |
| 994 | break; |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 995 | |
Daniel Dunbar | 11e383a | 2009-02-05 08:00:50 +0000 | [diff] [blame] | 996 | case ABIArgInfo::Indirect: |
Anders Carlsson | 0a8f847 | 2009-09-16 15:53:40 +0000 | [diff] [blame] | 997 | if (AI.getIndirectByVal()) |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 998 | Attrs |= llvm::Attribute::ByVal; |
Anders Carlsson | 0a8f847 | 2009-09-16 15:53:40 +0000 | [diff] [blame] | 999 | |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 1000 | Attrs |= |
Daniel Dunbar | 11e383a | 2009-02-05 08:00:50 +0000 | [diff] [blame] | 1001 | llvm::Attribute::constructAlignmentFromInt(AI.getIndirectAlign()); |
Daniel Dunbar | 0ac86f0 | 2009-03-18 19:51:01 +0000 | [diff] [blame] | 1002 | // byval disables readnone and readonly. |
| 1003 | FuncAttrs &= ~(llvm::Attribute::ReadOnly | |
| 1004 | llvm::Attribute::ReadNone); |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 1005 | break; |
Anton Korobeynikov | cc6fa88 | 2009-06-06 09:36:29 +0000 | [diff] [blame] | 1006 | |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 1007 | case ABIArgInfo::Ignore: |
| 1008 | // Skip increment, no matching LLVM parameter. |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1009 | continue; |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 1010 | |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1011 | case ABIArgInfo::Expand: { |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1012 | SmallVector<llvm::Type*, 8> types; |
Mike Stump | f5408fe | 2009-05-16 07:57:57 +0000 | [diff] [blame] | 1013 | // FIXME: This is rather inefficient. Do we ever actually need to do |
| 1014 | // anything here? The result should be just reconstructed on the other |
| 1015 | // side, so extension should be a non-issue. |
Chris Lattner | 9cbe4f0 | 2011-07-09 17:41:47 +0000 | [diff] [blame] | 1016 | getTypes().GetExpandedTypes(ParamType, types); |
John McCall | 42e0611 | 2011-05-15 02:19:42 +0000 | [diff] [blame] | 1017 | Index += types.size(); |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1018 | continue; |
| 1019 | } |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 1020 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1021 | |
Kostya Serebryany | c891666 | 2012-01-20 17:57:16 +0000 | [diff] [blame] | 1022 | if (Attrs) |
| 1023 | PAL.push_back(llvm::AttributeWithIndex::get(Index, Attrs)); |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1024 | ++Index; |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 1025 | } |
Devang Patel | a2c6912 | 2008-09-26 22:53:57 +0000 | [diff] [blame] | 1026 | if (FuncAttrs) |
| 1027 | PAL.push_back(llvm::AttributeWithIndex::get(~0, FuncAttrs)); |
Daniel Dunbar | 5323a4b | 2008-09-10 00:32:18 +0000 | [diff] [blame] | 1028 | } |
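
// Illustrative sketch (not part of this file): for a declaration such as
//
//   struct Big { int a[8]; };
//   struct Big copy(struct Big b);
//
// on a target that returns and passes Big indirectly, the list built above
// contains roughly (Index 1, StructRet) for the hidden return slot,
// (Index 2, ByVal plus an alignment attribute) for 'b', and the accumulated
// function-wide attributes (NoUnwind, ReadOnly, ... when applicable) last
// under index ~0U.  The exact contents depend on the target ABI and on the
// declaration's own attributes.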
| 1029 | |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1030 | /// An argument came in as a promoted argument; demote it back to its |
| 1031 | /// declared type. |
| 1032 | static llvm::Value *emitArgumentDemotion(CodeGenFunction &CGF, |
| 1033 | const VarDecl *var, |
| 1034 | llvm::Value *value) { |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1035 | llvm::Type *varType = CGF.ConvertType(var->getType()); |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1036 | |
| 1037 | // This can happen with promotions that actually don't change the |
| 1038 | // underlying type, like the enum promotions. |
| 1039 | if (value->getType() == varType) return value; |
| 1040 | |
| 1041 | assert((varType->isIntegerTy() || varType->isFloatingPointTy()) |
| 1042 | && "unexpected promotion type"); |
| 1043 | |
| 1044 | if (isa<llvm::IntegerType>(varType)) |
| 1045 | return CGF.Builder.CreateTrunc(value, varType, "arg.unpromote"); |
| 1046 | |
| 1047 | return CGF.Builder.CreateFPCast(value, varType, "arg.unpromote"); |
| 1048 | } |
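
// Illustrative sketch (not part of this file): this matters for unprototyped
// (K&R) definitions, e.g.
//
//   void f(x) float x; { ... }
//
// where the caller passes 'x' promoted to double; the FPCast above narrows the
// incoming value back to float so the body sees the declared type.  An integer
// case such as "void g(c) char c;" takes the CreateTrunc path instead.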
| 1049 | |
Daniel Dunbar | 88b5396 | 2009-02-02 22:03:45 +0000 | [diff] [blame] | 1050 | void CodeGenFunction::EmitFunctionProlog(const CGFunctionInfo &FI, |
| 1051 | llvm::Function *Fn, |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1052 | const FunctionArgList &Args) { |
John McCall | 0cfeb63 | 2009-07-28 01:00:58 +0000 | [diff] [blame] | 1053 | // If this is an implicit-return-zero function, go ahead and |
| 1054 | // initialize the return value. TODO: it might be nice to have |
| 1055 | // a more general mechanism for this that didn't require synthesized |
| 1056 | // return statements. |
Chris Lattner | 121b3fa | 2010-07-05 20:21:00 +0000 | [diff] [blame] | 1057 | if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(CurFuncDecl)) { |
John McCall | 0cfeb63 | 2009-07-28 01:00:58 +0000 | [diff] [blame] | 1058 | if (FD->hasImplicitReturnZero()) { |
| 1059 | QualType RetTy = FD->getResultType().getUnqualifiedType(); |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1060 | llvm::Type* LLVMTy = CGM.getTypes().ConvertType(RetTy); |
Owen Anderson | c9c88b4 | 2009-07-31 20:28:54 +0000 | [diff] [blame] | 1061 | llvm::Constant* Zero = llvm::Constant::getNullValue(LLVMTy); |
John McCall | 0cfeb63 | 2009-07-28 01:00:58 +0000 | [diff] [blame] | 1062 | Builder.CreateStore(Zero, ReturnValue); |
| 1063 | } |
| 1064 | } |
| 1065 | |
Mike Stump | f5408fe | 2009-05-16 07:57:57 +0000 | [diff] [blame] | 1066 | // FIXME: We no longer need the types from FunctionArgList; lift up and |
| 1067 | // simplify. |
Daniel Dunbar | 5251afa | 2009-02-03 06:02:10 +0000 | [diff] [blame] | 1068 | |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1069 | // Emit allocs for param decls. Give the LLVM Argument nodes names. |
| 1070 | llvm::Function::arg_iterator AI = Fn->arg_begin(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1071 | |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1072 | // Name the struct return argument. |
Daniel Dunbar | dacf9dd | 2010-07-14 23:39:36 +0000 | [diff] [blame] | 1073 | if (CGM.ReturnTypeUsesSRet(FI)) { |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1074 | AI->setName("agg.result"); |
John McCall | 410ffb2 | 2011-08-25 23:04:34 +0000 | [diff] [blame] | 1075 | AI->addAttr(llvm::Attribute::NoAlias); |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1076 | ++AI; |
| 1077 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1078 | |
Daniel Dunbar | 4b5f0a4 | 2009-02-04 21:17:21 +0000 | [diff] [blame] | 1079 | assert(FI.arg_size() == Args.size() && |
| 1080 | "Mismatch between function signature & arguments."); |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1081 | unsigned ArgNo = 1; |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 1082 | CGFunctionInfo::const_arg_iterator info_it = FI.arg_begin(); |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1083 | for (FunctionArgList::const_iterator i = Args.begin(), e = Args.end(); |
| 1084 | i != e; ++i, ++info_it, ++ArgNo) { |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1085 | const VarDecl *Arg = *i; |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 1086 | QualType Ty = info_it->type; |
| 1087 | const ABIArgInfo &ArgI = info_it->info; |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 1088 | |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1089 | bool isPromoted = |
| 1090 | isa<ParmVarDecl>(Arg) && cast<ParmVarDecl>(Arg)->isKNRPromoted(); |
| 1091 | |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 1092 | switch (ArgI.getKind()) { |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1093 | case ABIArgInfo::Indirect: { |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 1094 | llvm::Value *V = AI; |
Daniel Dunbar | cf3b6f2 | 2010-09-16 20:42:02 +0000 | [diff] [blame] | 1095 | |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1096 | if (hasAggregateLLVMType(Ty)) { |
Daniel Dunbar | cf3b6f2 | 2010-09-16 20:42:02 +0000 | [diff] [blame] | 1097 | // Aggregates and complex variables are accessed by reference. All we |
 | 1098 |         // need to do is realign the value, if requested.
| 1099 | if (ArgI.getIndirectRealign()) { |
| 1100 | llvm::Value *AlignedTemp = CreateMemTemp(Ty, "coerce"); |
| 1101 | |
| 1102 | // Copy from the incoming argument pointer to the temporary with the |
| 1103 | // appropriate alignment. |
| 1104 | // |
| 1105 | // FIXME: We should have a common utility for generating an aggregate |
| 1106 | // copy. |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1107 | llvm::Type *I8PtrTy = Builder.getInt8PtrTy(); |
Ken Dyck | fe71008 | 2011-01-19 01:58:38 +0000 | [diff] [blame] | 1108 | CharUnits Size = getContext().getTypeSizeInChars(Ty); |
NAKAMURA Takumi | c95a8fc | 2011-03-10 14:02:21 +0000 | [diff] [blame] | 1109 | llvm::Value *Dst = Builder.CreateBitCast(AlignedTemp, I8PtrTy); |
| 1110 | llvm::Value *Src = Builder.CreateBitCast(V, I8PtrTy); |
| 1111 | Builder.CreateMemCpy(Dst, |
| 1112 | Src, |
Ken Dyck | fe71008 | 2011-01-19 01:58:38 +0000 | [diff] [blame] | 1113 | llvm::ConstantInt::get(IntPtrTy, |
| 1114 | Size.getQuantity()), |
Benjamin Kramer | 9f0c7cc | 2010-12-30 00:13:21 +0000 | [diff] [blame] | 1115 | ArgI.getIndirectAlign(), |
| 1116 | false); |
Daniel Dunbar | cf3b6f2 | 2010-09-16 20:42:02 +0000 | [diff] [blame] | 1117 | V = AlignedTemp; |
| 1118 | } |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1119 | } else { |
| 1120 | // Load scalar value from indirect argument. |
Ken Dyck | fe71008 | 2011-01-19 01:58:38 +0000 | [diff] [blame] | 1121 | CharUnits Alignment = getContext().getTypeAlignInChars(Ty); |
| 1122 | V = EmitLoadOfScalar(V, false, Alignment.getQuantity(), Ty); |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1123 | |
| 1124 | if (isPromoted) |
| 1125 | V = emitArgumentDemotion(*this, Arg, V); |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1126 | } |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1127 | EmitParmDecl(*Arg, V, ArgNo); |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1128 | break; |
| 1129 | } |
Anton Korobeynikov | cc6fa88 | 2009-06-06 09:36:29 +0000 | [diff] [blame] | 1130 | |
| 1131 | case ABIArgInfo::Extend: |
Daniel Dunbar | 46327aa | 2009-02-03 06:17:37 +0000 | [diff] [blame] | 1132 | case ABIArgInfo::Direct: { |
Akira Hatanaka | 4ba3fd4 | 2012-01-09 19:08:06 +0000 | [diff] [blame] | 1133 | // Skip the dummy padding argument. |
| 1134 | if (ArgI.getPaddingType()) |
| 1135 | ++AI; |
| 1136 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1137 | // If we have the trivial case, handle it with no muss and fuss. |
| 1138 | if (!isa<llvm::StructType>(ArgI.getCoerceToType()) && |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1139 | ArgI.getCoerceToType() == ConvertType(Ty) && |
| 1140 | ArgI.getDirectOffset() == 0) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1141 | assert(AI != Fn->arg_end() && "Argument mismatch!"); |
| 1142 | llvm::Value *V = AI; |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1143 | |
John McCall | d8e10d2 | 2010-03-27 00:47:27 +0000 | [diff] [blame] | 1144 | if (Arg->getType().isRestrictQualified()) |
| 1145 | AI->addAttr(llvm::Attribute::NoAlias); |
| 1146 | |
Chris Lattner | b13eab9 | 2011-07-20 06:29:00 +0000 | [diff] [blame] | 1147 | // Ensure the argument is the correct type. |
| 1148 | if (V->getType() != ArgI.getCoerceToType()) |
| 1149 | V = Builder.CreateBitCast(V, ArgI.getCoerceToType()); |
| 1150 | |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1151 | if (isPromoted) |
| 1152 | V = emitArgumentDemotion(*this, Arg, V); |
Chris Lattner | b13eab9 | 2011-07-20 06:29:00 +0000 | [diff] [blame] | 1153 | |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1154 | EmitParmDecl(*Arg, V, ArgNo); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1155 | break; |
Daniel Dunbar | 8b979d9 | 2009-02-10 00:06:49 +0000 | [diff] [blame] | 1156 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1157 | |
Evgeniy Stepanov | a6ce20e | 2012-02-10 09:30:15 +0000 | [diff] [blame] | 1158 | llvm::AllocaInst *Alloca = CreateMemTemp(Ty, Arg->getName()); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1159 | |
Chris Lattner | deabde2 | 2010-07-28 18:24:28 +0000 | [diff] [blame] | 1160 |       // The alignment we need to use is the max of the requested alignment for
 | 1161 |       // the argument and the alignment required by our access code below.
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1162 | unsigned AlignmentToUse = |
John McCall | d16c2cf | 2011-02-08 08:22:06 +0000 | [diff] [blame] | 1163 | CGM.getTargetData().getABITypeAlignment(ArgI.getCoerceToType()); |
Chris Lattner | deabde2 | 2010-07-28 18:24:28 +0000 | [diff] [blame] | 1164 | AlignmentToUse = std::max(AlignmentToUse, |
| 1165 | (unsigned)getContext().getDeclAlign(Arg).getQuantity()); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1166 | |
Chris Lattner | deabde2 | 2010-07-28 18:24:28 +0000 | [diff] [blame] | 1167 | Alloca->setAlignment(AlignmentToUse); |
Chris Lattner | 121b3fa | 2010-07-05 20:21:00 +0000 | [diff] [blame] | 1168 | llvm::Value *V = Alloca; |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1169 | llvm::Value *Ptr = V; // Pointer to store into. |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1170 | |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1171 | // If the value is offset in memory, apply the offset now. |
| 1172 | if (unsigned Offs = ArgI.getDirectOffset()) { |
| 1173 | Ptr = Builder.CreateBitCast(Ptr, Builder.getInt8PtrTy()); |
| 1174 | Ptr = Builder.CreateConstGEP1_32(Ptr, Offs); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1175 | Ptr = Builder.CreateBitCast(Ptr, |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1176 | llvm::PointerType::getUnqual(ArgI.getCoerceToType())); |
| 1177 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1178 | |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 1179 |       // If the coerce-to type is a first-class aggregate, we flatten it and
 | 1180 |       // pass the elements. Either way is semantically identical, but fast-isel
 | 1181 |       // and the optimizer generally like scalar values better than FCAs.
Evgeniy Stepanov | a6ce20e | 2012-02-10 09:30:15 +0000 | [diff] [blame] | 1182 | llvm::StructType *STy = dyn_cast<llvm::StructType>(ArgI.getCoerceToType()); |
| 1183 | if (STy && STy->getNumElements() > 1) { |
| 1184 | uint64_t SrcSize = CGM.getTargetData().getTypeAllocSize(STy); |
| 1185 | llvm::Type *DstTy = |
| 1186 | cast<llvm::PointerType>(Ptr->getType())->getElementType(); |
| 1187 | uint64_t DstSize = CGM.getTargetData().getTypeAllocSize(DstTy); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1188 | |
Evgeniy Stepanov | a6ce20e | 2012-02-10 09:30:15 +0000 | [diff] [blame] | 1189 | if (SrcSize <= DstSize) { |
| 1190 | Ptr = Builder.CreateBitCast(Ptr, llvm::PointerType::getUnqual(STy)); |
| 1191 | |
| 1192 | for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) { |
| 1193 | assert(AI != Fn->arg_end() && "Argument mismatch!"); |
| 1194 | AI->setName(Arg->getName() + ".coerce" + Twine(i)); |
| 1195 | llvm::Value *EltPtr = Builder.CreateConstGEP2_32(Ptr, 0, i); |
| 1196 | Builder.CreateStore(AI++, EltPtr); |
| 1197 | } |
| 1198 | } else { |
| 1199 | llvm::AllocaInst *TempAlloca = |
| 1200 | CreateTempAlloca(ArgI.getCoerceToType(), "coerce"); |
| 1201 | TempAlloca->setAlignment(AlignmentToUse); |
| 1202 | llvm::Value *TempV = TempAlloca; |
| 1203 | |
| 1204 | for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) { |
| 1205 | assert(AI != Fn->arg_end() && "Argument mismatch!"); |
| 1206 | AI->setName(Arg->getName() + ".coerce" + Twine(i)); |
| 1207 | llvm::Value *EltPtr = Builder.CreateConstGEP2_32(TempV, 0, i); |
| 1208 | Builder.CreateStore(AI++, EltPtr); |
| 1209 | } |
| 1210 | |
| 1211 | Builder.CreateMemCpy(Ptr, TempV, DstSize, AlignmentToUse); |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 1212 | } |
| 1213 | } else { |
| 1214 | // Simple case, just do a coerced store of the argument into the alloca. |
| 1215 | assert(AI != Fn->arg_end() && "Argument mismatch!"); |
Chris Lattner | 225e286 | 2010-06-29 00:14:52 +0000 | [diff] [blame] | 1216 | AI->setName(Arg->getName() + ".coerce"); |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1217 | CreateCoercedStore(AI++, Ptr, /*DestIsVolatile=*/false, *this); |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 1218 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1219 | 
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1221 | // Match to what EmitParmDecl is expecting for this type. |
Daniel Dunbar | 8b29a38 | 2009-02-04 07:22:24 +0000 | [diff] [blame] | 1222 | if (!CodeGenFunction::hasAggregateLLVMType(Ty)) { |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1223 | V = EmitLoadOfScalar(V, false, AlignmentToUse, Ty); |
John McCall | d26bc76 | 2011-03-09 04:27:21 +0000 | [diff] [blame] | 1224 | if (isPromoted) |
| 1225 | V = emitArgumentDemotion(*this, Arg, V); |
Daniel Dunbar | 8b29a38 | 2009-02-04 07:22:24 +0000 | [diff] [blame] | 1226 | } |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1227 | EmitParmDecl(*Arg, V, ArgNo); |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 1228 | continue; // Skip ++AI increment, already done. |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1229 | } |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1230 | |
| 1231 | case ABIArgInfo::Expand: { |
| 1232 | // If this structure was expanded into multiple arguments then |
| 1233 | // we need to create a temporary and reconstruct it from the |
| 1234 | // arguments. |
Eli Friedman | 1bb94a4 | 2011-11-03 21:39:02 +0000 | [diff] [blame] | 1235 | llvm::AllocaInst *Alloca = CreateMemTemp(Ty); |
Eli Friedman | 6da2c71 | 2011-12-03 04:14:32 +0000 | [diff] [blame] | 1236 | CharUnits Align = getContext().getDeclAlign(Arg); |
| 1237 | Alloca->setAlignment(Align.getQuantity()); |
| 1238 | LValue LV = MakeAddrLValue(Alloca, Ty, Align); |
Eli Friedman | 1bb94a4 | 2011-11-03 21:39:02 +0000 | [diff] [blame] | 1239 | llvm::Function::arg_iterator End = ExpandTypeFromArgs(Ty, LV, AI); |
| 1240 | EmitParmDecl(*Arg, Alloca, ArgNo); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1241 | |
| 1242 | // Name the arguments used in expansion and increment AI. |
| 1243 | unsigned Index = 0; |
| 1244 | for (; AI != End; ++AI, ++Index) |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1245 | AI->setName(Arg->getName() + "." + Twine(Index)); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1246 | continue; |
| 1247 | } |
| 1248 | |
| 1249 | case ABIArgInfo::Ignore: |
| 1250 | // Initialize the local variable appropriately. |
| 1251 | if (hasAggregateLLVMType(Ty)) |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1252 | EmitParmDecl(*Arg, CreateMemTemp(Ty), ArgNo); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1253 | else |
Devang Patel | 093ac46 | 2011-03-03 20:13:15 +0000 | [diff] [blame] | 1254 | EmitParmDecl(*Arg, llvm::UndefValue::get(ConvertType(Arg->getType())), |
| 1255 | ArgNo); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1256 | |
| 1257 | // Skip increment, no matching LLVM parameter. |
| 1258 | continue; |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 1259 | } |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1260 | |
| 1261 | ++AI; |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1262 | } |
| 1263 | assert(AI == Fn->arg_end() && "Argument mismatch!"); |
| 1264 | } |
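
// Illustrative sketch (not part of this file): for a coerced aggregate
// parameter, e.g. on x86-64
//
//   struct Pt { double x, y; };
//   double len(struct Pt p);
//
// the ABI typically coerces 'p' to { double, double }, so the prolog above
// receives two scalar arguments named "p.coerce0" and "p.coerce1" and stores
// them into the elements of a local alloca for 'p' before the body runs.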
| 1265 | |
John McCall | 77fe6cd | 2012-01-29 07:46:59 +0000 | [diff] [blame] | 1266 | static void eraseUnusedBitCasts(llvm::Instruction *insn) { |
| 1267 | while (insn->use_empty()) { |
| 1268 | llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(insn); |
| 1269 | if (!bitcast) return; |
| 1270 | |
| 1271 | // This is "safe" because we would have used a ConstantExpr otherwise. |
| 1272 | insn = cast<llvm::Instruction>(bitcast->getOperand(0)); |
| 1273 | bitcast->eraseFromParent(); |
| 1274 | } |
| 1275 | } |
| 1276 | |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1277 | /// Try to emit a fused autorelease of a return result. |
| 1278 | static llvm::Value *tryEmitFusedAutoreleaseOfResult(CodeGenFunction &CGF, |
| 1279 | llvm::Value *result) { |
 | 1280 |   // We must be immediately following the instruction that produced the result.
| 1281 | llvm::BasicBlock *BB = CGF.Builder.GetInsertBlock(); |
| 1282 | if (BB->empty()) return 0; |
| 1283 | if (&BB->back() != result) return 0; |
| 1284 | |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1285 | llvm::Type *resultType = result->getType(); |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1286 | |
| 1287 | // result is in a BasicBlock and is therefore an Instruction. |
| 1288 | llvm::Instruction *generator = cast<llvm::Instruction>(result); |
| 1289 | |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1290 | SmallVector<llvm::Instruction*,4> insnsToKill; |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1291 | |
| 1292 | // Look for: |
| 1293 | // %generator = bitcast %type1* %generator2 to %type2* |
| 1294 | while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(generator)) { |
| 1295 | // We would have emitted this as a constant if the operand weren't |
| 1296 | // an Instruction. |
| 1297 | generator = cast<llvm::Instruction>(bitcast->getOperand(0)); |
| 1298 | |
| 1299 | // Require the generator to be immediately followed by the cast. |
| 1300 | if (generator->getNextNode() != bitcast) |
| 1301 | return 0; |
| 1302 | |
| 1303 | insnsToKill.push_back(bitcast); |
| 1304 | } |
| 1305 | |
| 1306 | // Look for: |
| 1307 | // %generator = call i8* @objc_retain(i8* %originalResult) |
| 1308 | // or |
| 1309 | // %generator = call i8* @objc_retainAutoreleasedReturnValue(i8* %originalResult) |
| 1310 | llvm::CallInst *call = dyn_cast<llvm::CallInst>(generator); |
| 1311 | if (!call) return 0; |
| 1312 | |
| 1313 | bool doRetainAutorelease; |
| 1314 | |
| 1315 | if (call->getCalledValue() == CGF.CGM.getARCEntrypoints().objc_retain) { |
| 1316 | doRetainAutorelease = true; |
| 1317 | } else if (call->getCalledValue() == CGF.CGM.getARCEntrypoints() |
| 1318 | .objc_retainAutoreleasedReturnValue) { |
| 1319 | doRetainAutorelease = false; |
| 1320 | |
| 1321 | // Look for an inline asm immediately preceding the call and kill it, too. |
| 1322 | llvm::Instruction *prev = call->getPrevNode(); |
| 1323 | if (llvm::CallInst *asmCall = dyn_cast_or_null<llvm::CallInst>(prev)) |
| 1324 | if (asmCall->getCalledValue() |
| 1325 | == CGF.CGM.getARCEntrypoints().retainAutoreleasedReturnValueMarker) |
| 1326 | insnsToKill.push_back(prev); |
| 1327 | } else { |
| 1328 | return 0; |
| 1329 | } |
| 1330 | |
| 1331 | result = call->getArgOperand(0); |
| 1332 | insnsToKill.push_back(call); |
| 1333 | |
| 1334 | // Keep killing bitcasts, for sanity. Note that we no longer care |
| 1335 | // about precise ordering as long as there's exactly one use. |
| 1336 | while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(result)) { |
| 1337 | if (!bitcast->hasOneUse()) break; |
| 1338 | insnsToKill.push_back(bitcast); |
| 1339 | result = bitcast->getOperand(0); |
| 1340 | } |
| 1341 | |
| 1342 | // Delete all the unnecessary instructions, from latest to earliest. |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1343 | for (SmallVectorImpl<llvm::Instruction*>::iterator |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1344 | i = insnsToKill.begin(), e = insnsToKill.end(); i != e; ++i) |
| 1345 | (*i)->eraseFromParent(); |
| 1346 | |
| 1347 | // Do the fused retain/autorelease if we were asked to. |
| 1348 | if (doRetainAutorelease) |
| 1349 | result = CGF.EmitARCRetainAutoreleaseReturnValue(result); |
| 1350 | |
| 1351 | // Cast back to the result type. |
| 1352 | return CGF.Builder.CreateBitCast(result, resultType); |
| 1353 | } |
| 1354 | |
John McCall | 77fe6cd | 2012-01-29 07:46:59 +0000 | [diff] [blame] | 1355 | /// If this is a +1 of the value of an immutable 'self', remove it. |
| 1356 | static llvm::Value *tryRemoveRetainOfSelf(CodeGenFunction &CGF, |
| 1357 | llvm::Value *result) { |
| 1358 | // This is only applicable to a method with an immutable 'self'. |
| 1359 | const ObjCMethodDecl *method = dyn_cast<ObjCMethodDecl>(CGF.CurCodeDecl); |
| 1360 | if (!method) return 0; |
| 1361 | const VarDecl *self = method->getSelfDecl(); |
| 1362 | if (!self->getType().isConstQualified()) return 0; |
| 1363 | |
| 1364 | // Look for a retain call. |
| 1365 | llvm::CallInst *retainCall = |
| 1366 | dyn_cast<llvm::CallInst>(result->stripPointerCasts()); |
| 1367 | if (!retainCall || |
| 1368 | retainCall->getCalledValue() != CGF.CGM.getARCEntrypoints().objc_retain) |
| 1369 | return 0; |
| 1370 | |
| 1371 | // Look for an ordinary load of 'self'. |
| 1372 | llvm::Value *retainedValue = retainCall->getArgOperand(0); |
| 1373 | llvm::LoadInst *load = |
| 1374 | dyn_cast<llvm::LoadInst>(retainedValue->stripPointerCasts()); |
| 1375 | if (!load || load->isAtomic() || load->isVolatile() || |
| 1376 | load->getPointerOperand() != CGF.GetAddrOfLocalVar(self)) |
| 1377 | return 0; |
| 1378 | |
| 1379 | // Okay! Burn it all down. This relies for correctness on the |
| 1380 | // assumption that the retain is emitted as part of the return and |
| 1381 | // that thereafter everything is used "linearly". |
| 1382 | llvm::Type *resultType = result->getType(); |
| 1383 | eraseUnusedBitCasts(cast<llvm::Instruction>(result)); |
| 1384 | assert(retainCall->use_empty()); |
| 1385 | retainCall->eraseFromParent(); |
| 1386 | eraseUnusedBitCasts(cast<llvm::Instruction>(retainedValue)); |
| 1387 | |
| 1388 | return CGF.Builder.CreateBitCast(load, resultType); |
| 1389 | } |
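
// Illustrative sketch (not part of this file): the pattern this matches is,
// roughly,
//
//   - (id)foo { return self; }     // ARC, 'self' never reassigned
//
// where the return value is a load of the 'self' alloca followed by a call to
// @objc_retain (possibly through bitcasts).  When it matches, the retain is
// deleted and the caller below skips the autorelease as well, returning the
// loaded 'self' directly.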
| 1390 | |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1391 | /// Emit an ARC autorelease of the result of a function. |
John McCall | 77fe6cd | 2012-01-29 07:46:59 +0000 | [diff] [blame] | 1392 | /// |
| 1393 | /// \return the value to actually return from the function |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1394 | static llvm::Value *emitAutoreleaseOfResult(CodeGenFunction &CGF, |
| 1395 | llvm::Value *result) { |
John McCall | 77fe6cd | 2012-01-29 07:46:59 +0000 | [diff] [blame] | 1396 | // If we're returning 'self', kill the initial retain. This is a |
| 1397 | // heuristic attempt to "encourage correctness" in the really unfortunate |
| 1398 | // case where we have a return of self during a dealloc and we desperately |
| 1399 | // need to avoid the possible autorelease. |
| 1400 | if (llvm::Value *self = tryRemoveRetainOfSelf(CGF, result)) |
| 1401 | return self; |
| 1402 | |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1403 | // At -O0, try to emit a fused retain/autorelease. |
| 1404 | if (CGF.shouldUseFusedARCCalls()) |
| 1405 | if (llvm::Value *fused = tryEmitFusedAutoreleaseOfResult(CGF, result)) |
| 1406 | return fused; |
| 1407 | |
| 1408 | return CGF.EmitARCAutoreleaseReturnValue(result); |
| 1409 | } |
| 1410 | |
John McCall | f48f796 | 2012-01-29 02:35:02 +0000 | [diff] [blame] | 1411 | /// Heuristically search for a dominating store to the return-value slot. |
| 1412 | static llvm::StoreInst *findDominatingStoreToReturnValue(CodeGenFunction &CGF) { |
| 1413 | // If there are multiple uses of the return-value slot, just check |
| 1414 | // for something immediately preceding the IP. Sometimes this can |
| 1415 | // happen with how we generate implicit-returns; it can also happen |
| 1416 | // with noreturn cleanups. |
| 1417 | if (!CGF.ReturnValue->hasOneUse()) { |
| 1418 | llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock(); |
| 1419 | if (IP->empty()) return 0; |
| 1420 | llvm::StoreInst *store = dyn_cast<llvm::StoreInst>(&IP->back()); |
| 1421 | if (!store) return 0; |
| 1422 | if (store->getPointerOperand() != CGF.ReturnValue) return 0; |
| 1423 | assert(!store->isAtomic() && !store->isVolatile()); // see below |
| 1424 | return store; |
| 1425 | } |
| 1426 | |
| 1427 | llvm::StoreInst *store = |
| 1428 | dyn_cast<llvm::StoreInst>(CGF.ReturnValue->use_back()); |
| 1429 | if (!store) return 0; |
| 1430 | |
| 1431 | // These aren't actually possible for non-coerced returns, and we |
| 1432 | // only care about non-coerced returns on this code path. |
| 1433 | assert(!store->isAtomic() && !store->isVolatile()); |
| 1434 | |
 | 1435 |   // Now do a quick-and-dirty dominance check: just walk up the
 | 1436 |   // single-predecessor chain from the current insertion point.
| 1437 | llvm::BasicBlock *StoreBB = store->getParent(); |
| 1438 | llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock(); |
| 1439 | while (IP != StoreBB) { |
| 1440 | if (!(IP = IP->getSinglePredecessor())) |
| 1441 | return 0; |
| 1442 | } |
| 1443 | |
| 1444 | // Okay, the store's basic block dominates the insertion point; we |
| 1445 | // can do our thing. |
| 1446 | return store; |
| 1447 | } |
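
// Illustrative sketch (not part of this file): in the common case
//
//   int f(void) { return 42; }
//
// the body emits "store i32 42, i32* %retval" immediately before the epilogue
// runs, so this returns that store; EmitFunctionEpilog below can then forward
// the stored value into the ret, delete the store, and usually delete the
// %retval alloca as well.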
| 1448 | |
Chris Lattner | 35b21b8 | 2010-06-27 01:06:27 +0000 | [diff] [blame] | 1449 | void CodeGenFunction::EmitFunctionEpilog(const CGFunctionInfo &FI) { |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 1450 | // Functions with no result always return void. |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1451 | if (ReturnValue == 0) { |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 1452 | Builder.CreateRetVoid(); |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1453 | return; |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 1454 | } |
Daniel Dunbar | 21fcc8f | 2010-06-30 21:27:58 +0000 | [diff] [blame] | 1455 | |
Dan Gohman | 4751a53 | 2010-07-20 20:13:52 +0000 | [diff] [blame] | 1456 | llvm::DebugLoc RetDbgLoc; |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1457 | llvm::Value *RV = 0; |
| 1458 | QualType RetTy = FI.getReturnType(); |
| 1459 | const ABIArgInfo &RetAI = FI.getReturnInfo(); |
| 1460 | |
| 1461 | switch (RetAI.getKind()) { |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1462 | case ABIArgInfo::Indirect: { |
| 1463 | unsigned Alignment = getContext().getTypeAlignInChars(RetTy).getQuantity(); |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1464 | if (RetTy->isAnyComplexType()) { |
| 1465 | ComplexPairTy RT = LoadComplexFromAddr(ReturnValue, false); |
| 1466 | StoreComplexToAddr(RT, CurFn->arg_begin(), false); |
| 1467 | } else if (CodeGenFunction::hasAggregateLLVMType(RetTy)) { |
 | 1468 |       // Do nothing; aggregates get evaluated directly into the destination.
| 1469 | } else { |
| 1470 | EmitStoreOfScalar(Builder.CreateLoad(ReturnValue), CurFn->arg_begin(), |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1471 | false, Alignment, RetTy); |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1472 | } |
| 1473 | break; |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1474 | } |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1475 | |
| 1476 | case ABIArgInfo::Extend: |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1477 | case ABIArgInfo::Direct: |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1478 | if (RetAI.getCoerceToType() == ConvertType(RetTy) && |
| 1479 | RetAI.getDirectOffset() == 0) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1480 |       // The internal return value temp will always have pointer-to-return-type
 | 1481 |       // type; just do a load.
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1482 | |
John McCall | f48f796 | 2012-01-29 02:35:02 +0000 | [diff] [blame] | 1483 | // If there is a dominating store to ReturnValue, we can elide |
| 1484 | // the load, zap the store, and usually zap the alloca. |
| 1485 | if (llvm::StoreInst *SI = findDominatingStoreToReturnValue(*this)) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1486 | // Get the stored value and nuke the now-dead store. |
| 1487 | RetDbgLoc = SI->getDebugLoc(); |
| 1488 | RV = SI->getValueOperand(); |
| 1489 | SI->eraseFromParent(); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1490 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1491 | // If that was the only use of the return value, nuke it as well now. |
| 1492 | if (ReturnValue->use_empty() && isa<llvm::AllocaInst>(ReturnValue)) { |
| 1493 | cast<llvm::AllocaInst>(ReturnValue)->eraseFromParent(); |
| 1494 | ReturnValue = 0; |
| 1495 | } |
John McCall | f48f796 | 2012-01-29 02:35:02 +0000 | [diff] [blame] | 1496 | |
| 1497 | // Otherwise, we have to do a simple load. |
| 1498 | } else { |
| 1499 | RV = Builder.CreateLoad(ReturnValue); |
Chris Lattner | 35b21b8 | 2010-06-27 01:06:27 +0000 | [diff] [blame] | 1500 | } |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1501 | } else { |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1502 | llvm::Value *V = ReturnValue; |
| 1503 | // If the value is offset in memory, apply the offset now. |
| 1504 | if (unsigned Offs = RetAI.getDirectOffset()) { |
| 1505 | V = Builder.CreateBitCast(V, Builder.getInt8PtrTy()); |
| 1506 | V = Builder.CreateConstGEP1_32(V, Offs); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1507 | V = Builder.CreateBitCast(V, |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1508 | llvm::PointerType::getUnqual(RetAI.getCoerceToType())); |
| 1509 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1510 | |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1511 | RV = CreateCoercedLoad(V, RetAI.getCoerceToType(), *this); |
Chris Lattner | 35b21b8 | 2010-06-27 01:06:27 +0000 | [diff] [blame] | 1512 | } |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1513 | |
| 1514 | // In ARC, end functions that return a retainable type with a call |
| 1515 | // to objc_autoreleaseReturnValue. |
| 1516 | if (AutoreleaseResult) { |
David Blaikie | 4e4d084 | 2012-03-11 07:00:24 +0000 | [diff] [blame] | 1517 | assert(getLangOpts().ObjCAutoRefCount && |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1518 | !FI.isReturnsRetained() && |
| 1519 | RetTy->isObjCRetainableType()); |
| 1520 | RV = emitAutoreleaseOfResult(*this, RV); |
| 1521 | } |
| 1522 | |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1523 | break; |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1524 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1525 | case ABIArgInfo::Ignore: |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1526 | break; |
| 1527 | |
| 1528 | case ABIArgInfo::Expand: |
David Blaikie | b219cfc | 2011-09-23 05:06:16 +0000 | [diff] [blame] | 1529 | llvm_unreachable("Invalid ABI kind for return argument"); |
Chris Lattner | c6e6dd2 | 2010-06-26 23:13:19 +0000 | [diff] [blame] | 1530 | } |
| 1531 | |
Daniel Dunbar | 21fcc8f | 2010-06-30 21:27:58 +0000 | [diff] [blame] | 1532 | llvm::Instruction *Ret = RV ? Builder.CreateRet(RV) : Builder.CreateRetVoid(); |
Devang Patel | d3f265d | 2010-07-21 18:08:50 +0000 | [diff] [blame] | 1533 | if (!RetDbgLoc.isUnknown()) |
| 1534 | Ret->setDebugLoc(RetDbgLoc); |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1535 | } |
| 1536 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1537 | void CodeGenFunction::EmitDelegateCallArg(CallArgList &args, |
| 1538 | const VarDecl *param) { |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1539 | // StartFunction converted the ABI-lowered parameter(s) into a |
| 1540 | // local alloca. We need to turn that into an r-value suitable |
| 1541 | // for EmitCall. |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1542 | llvm::Value *local = GetAddrOfLocalVar(param); |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1543 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1544 | QualType type = param->getType(); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1545 | |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1546 | // For the most part, we just need to load the alloca, except: |
| 1547 | // 1) aggregate r-values are actually pointers to temporaries, and |
| 1548 | // 2) references to aggregates are pointers directly to the aggregate. |
| 1549 | // I don't know why references to non-aggregates are different here. |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1550 | if (const ReferenceType *ref = type->getAs<ReferenceType>()) { |
| 1551 | if (hasAggregateLLVMType(ref->getPointeeType())) |
| 1552 | return args.add(RValue::getAggregate(local), type); |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1553 | |
| 1554 | // Locals which are references to scalars are represented |
| 1555 | // with allocas holding the pointer. |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1556 | return args.add(RValue::get(Builder.CreateLoad(local)), type); |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1557 | } |
| 1558 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1559 | if (type->isAnyComplexType()) { |
| 1560 | ComplexPairTy complex = LoadComplexFromAddr(local, /*volatile*/ false); |
| 1561 | return args.add(RValue::getComplex(complex), type); |
| 1562 | } |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1563 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1564 | if (hasAggregateLLVMType(type)) |
| 1565 | return args.add(RValue::getAggregate(local), type); |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1566 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1567 | unsigned alignment = getContext().getDeclAlign(param).getQuantity(); |
| 1568 | llvm::Value *value = EmitLoadOfScalar(local, false, alignment, type); |
| 1569 | return args.add(RValue::get(value), type); |
John McCall | 2736071 | 2010-05-26 22:34:26 +0000 | [diff] [blame] | 1570 | } |
| 1571 | |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1572 | static bool isProvablyNull(llvm::Value *addr) { |
| 1573 | return isa<llvm::ConstantPointerNull>(addr); |
| 1574 | } |
| 1575 | |
| 1576 | static bool isProvablyNonNull(llvm::Value *addr) { |
| 1577 | return isa<llvm::AllocaInst>(addr); |
| 1578 | } |
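| | // Note that these checks are deliberately conservative and purely |
| | // syntactic: only a literal ConstantPointerNull is treated as provably |
| | // null, and only an AllocaInst (whose address is always valid) is treated |
| | // as provably non-null. Anything else falls through to the explicit |
| | // runtime null checks emitted below. |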
| 1579 | |
| 1580 | /// Emit the actual writing-back of a writeback. |
| 1581 | static void emitWriteback(CodeGenFunction &CGF, |
| 1582 | const CallArgList::Writeback &writeback) { |
| 1583 | llvm::Value *srcAddr = writeback.Address; |
| 1584 | assert(!isProvablyNull(srcAddr) && |
| 1585 | "shouldn't have writeback for provably null argument"); |
| 1586 | |
| 1587 | llvm::BasicBlock *contBB = 0; |
| 1588 | |
| 1589 | // If the argument wasn't provably non-null, we need to null check |
| 1590 | // before doing the store. |
| 1591 | bool provablyNonNull = isProvablyNonNull(srcAddr); |
| 1592 | if (!provablyNonNull) { |
| 1593 | llvm::BasicBlock *writebackBB = CGF.createBasicBlock("icr.writeback"); |
| 1594 | contBB = CGF.createBasicBlock("icr.done"); |
| 1595 | |
| 1596 | llvm::Value *isNull = CGF.Builder.CreateIsNull(srcAddr, "icr.isnull"); |
| 1597 | CGF.Builder.CreateCondBr(isNull, contBB, writebackBB); |
| 1598 | CGF.EmitBlock(writebackBB); |
| 1599 | } |
| 1600 | |
| 1601 | // Load the value to writeback. |
| 1602 | llvm::Value *value = CGF.Builder.CreateLoad(writeback.Temporary); |
| 1603 | |
| 1604 | // Cast it back, in case we're writing an id to a Foo* or something. |
| 1605 | value = CGF.Builder.CreateBitCast(value, |
| 1606 | cast<llvm::PointerType>(srcAddr->getType())->getElementType(), |
| 1607 | "icr.writeback-cast"); |
| 1608 | |
| 1609 | // Perform the writeback. |
| 1610 | QualType srcAddrType = writeback.AddressType; |
| 1611 | CGF.EmitStoreThroughLValue(RValue::get(value), |
John McCall | 545d996 | 2011-06-25 02:11:03 +0000 | [diff] [blame] | 1612 | CGF.MakeAddrLValue(srcAddr, srcAddrType)); |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1613 | |
| 1614 | // Jump to the continuation block. |
| 1615 | if (!provablyNonNull) |
| 1616 | CGF.EmitBlock(contBB); |
| 1617 | } |
| 1618 | |
| 1619 | static void emitWritebacks(CodeGenFunction &CGF, |
| 1620 | const CallArgList &args) { |
| 1621 | for (CallArgList::writeback_iterator |
| 1622 | i = args.writeback_begin(), e = args.writeback_end(); i != e; ++i) |
| 1623 | emitWriteback(CGF, *i); |
| 1624 | } |
| 1625 | |
| 1626 | /// Emit an argument that's being passed call-by-writeback. That is, |
| 1627 | /// we are passing the address of a temporary; the source may be copied |
| | /// into it before the call, and its value is written back afterwards. |
| 1628 | static void emitWritebackArg(CodeGenFunction &CGF, CallArgList &args, |
| 1629 | const ObjCIndirectCopyRestoreExpr *CRE) { |
| 1630 | llvm::Value *srcAddr = CGF.EmitScalarExpr(CRE->getSubExpr()); |
| 1631 | |
| 1632 | // The dest and src types don't necessarily match in LLVM terms |
| 1633 | // because of the crazy ObjC compatibility rules. |
| 1634 | |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 1635 | llvm::PointerType *destType = |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1636 | cast<llvm::PointerType>(CGF.ConvertType(CRE->getType())); |
| 1637 | |
| 1638 | // If the address is a constant null, just pass the appropriate null. |
| 1639 | if (isProvablyNull(srcAddr)) { |
| 1640 | args.add(RValue::get(llvm::ConstantPointerNull::get(destType)), |
| 1641 | CRE->getType()); |
| 1642 | return; |
| 1643 | } |
| 1644 | |
| 1645 | QualType srcAddrType = |
| 1646 | CRE->getSubExpr()->getType()->castAs<PointerType>()->getPointeeType(); |
| 1647 | |
| 1648 | // Create the temporary. |
| 1649 | llvm::Value *temp = CGF.CreateTempAlloca(destType->getElementType(), |
| 1650 | "icr.temp"); |
| 1651 | |
| 1652 | // Zero-initialize it if we're not doing a copy-initialization. |
| 1653 | bool shouldCopy = CRE->shouldCopy(); |
| 1654 | if (!shouldCopy) { |
| 1655 | llvm::Value *null = |
| 1656 | llvm::ConstantPointerNull::get( |
| 1657 | cast<llvm::PointerType>(destType->getElementType())); |
| 1658 | CGF.Builder.CreateStore(null, temp); |
| 1659 | } |
| 1660 | |
| 1661 | llvm::BasicBlock *contBB = 0; |
| 1662 | |
| 1663 | // If the address is *not* known to be non-null, we need to switch. |
| 1664 | llvm::Value *finalArgument; |
| 1665 | |
| 1666 | bool provablyNonNull = isProvablyNonNull(srcAddr); |
| 1667 | if (provablyNonNull) { |
| 1668 | finalArgument = temp; |
| 1669 | } else { |
| 1670 | llvm::Value *isNull = CGF.Builder.CreateIsNull(srcAddr, "icr.isnull"); |
| 1671 | |
| 1672 | finalArgument = CGF.Builder.CreateSelect(isNull, |
| 1673 | llvm::ConstantPointerNull::get(destType), |
| 1674 | temp, "icr.argument"); |
| 1675 | |
| 1676 | // If we need to copy, then the load has to be conditional, which |
| 1677 | // means we need control flow. |
| 1678 | if (shouldCopy) { |
| 1679 | contBB = CGF.createBasicBlock("icr.cont"); |
| 1680 | llvm::BasicBlock *copyBB = CGF.createBasicBlock("icr.copy"); |
| 1681 | CGF.Builder.CreateCondBr(isNull, contBB, copyBB); |
| 1682 | CGF.EmitBlock(copyBB); |
| 1683 | } |
| 1684 | } |
| 1685 | |
| 1686 | // Perform a copy if necessary. |
| 1687 | if (shouldCopy) { |
| 1688 | LValue srcLV = CGF.MakeAddrLValue(srcAddr, srcAddrType); |
John McCall | 545d996 | 2011-06-25 02:11:03 +0000 | [diff] [blame] | 1689 | RValue srcRV = CGF.EmitLoadOfLValue(srcLV); |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1690 | assert(srcRV.isScalar()); |
| 1691 | |
| 1692 | llvm::Value *src = srcRV.getScalarVal(); |
| 1693 | src = CGF.Builder.CreateBitCast(src, destType->getElementType(), |
| 1694 | "icr.cast"); |
| 1695 | |
| 1696 | // Use an ordinary store, not a store-to-lvalue. |
| 1697 | CGF.Builder.CreateStore(src, temp); |
| 1698 | } |
| 1699 | |
| 1700 | // Finish the control flow if we needed it. |
| 1701 | if (shouldCopy && !provablyNonNull) |
| 1702 | CGF.EmitBlock(contBB); |
| 1703 | |
| 1704 | args.addWriteback(srcAddr, srcAddrType, temp); |
| 1705 | args.add(RValue::get(finalArgument), CRE->getType()); |
| 1706 | } |
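| | // For illustration (hypothetical caller, not from this file): the usual |
| | // client of this path is an autoreleasing out-parameter under ARC, e.g. |
| | //   NSError *err = nil; |
| | //   BOOL ok = [obj doSomething:&err]; |
| | // Sema wraps &err in an ObjCIndirectCopyRestoreExpr, so the code above |
| | // passes the address of the "icr.temp" alloca instead of &err, and |
| | // emitWriteback() stores the callee's result back into 'err' after the |
| | // call, guarded by a null check unless the address is provably non-null. |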
| 1707 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1708 | void CodeGenFunction::EmitCallArg(CallArgList &args, const Expr *E, |
| 1709 | QualType type) { |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1710 | if (const ObjCIndirectCopyRestoreExpr *CRE |
| 1711 | = dyn_cast<ObjCIndirectCopyRestoreExpr>(E)) { |
David Blaikie | 4e4d084 | 2012-03-11 07:00:24 +0000 | [diff] [blame] | 1712 | assert(getContext().getLangOpts().ObjCAutoRefCount); |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 1713 | assert(getContext().hasSameType(E->getType(), type)); |
| 1714 | return emitWritebackArg(*this, args, CRE); |
| 1715 | } |
| 1716 | |
John McCall | 8affed5 | 2011-08-26 18:42:59 +0000 | [diff] [blame] | 1717 | assert(type->isReferenceType() == E->isGLValue() && |
| 1718 | "reference binding to unmaterialized r-value!"); |
| 1719 | |
John McCall | cec52f0 | 2011-08-26 21:08:13 +0000 | [diff] [blame] | 1720 | if (E->isGLValue()) { |
| 1721 | assert(E->getObjectKind() == OK_Ordinary); |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1722 | return args.add(EmitReferenceBindingToExpr(E, /*InitializedDecl=*/0), |
| 1723 | type); |
John McCall | cec52f0 | 2011-08-26 21:08:13 +0000 | [diff] [blame] | 1724 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1725 | |
Eli Friedman | 70cbd2a | 2011-06-15 18:26:32 +0000 | [diff] [blame] | 1726 | if (hasAggregateLLVMType(type) && !E->getType()->isAnyComplexType() && |
| 1727 | isa<ImplicitCastExpr>(E) && |
Eli Friedman | 55d4848 | 2011-05-26 00:10:27 +0000 | [diff] [blame] | 1728 | cast<CastExpr>(E)->getCastKind() == CK_LValueToRValue) { |
| 1729 | LValue L = EmitLValue(cast<CastExpr>(E)->getSubExpr()); |
| 1730 | assert(L.isSimple()); |
Eli Friedman | 51f5120 | 2011-12-03 03:08:40 +0000 | [diff] [blame] | 1731 | args.add(L.asAggregateRValue(), type, /*NeedsCopy*/true); |
Eli Friedman | 55d4848 | 2011-05-26 00:10:27 +0000 | [diff] [blame] | 1732 | return; |
| 1733 | } |
| 1734 | |
John McCall | 413ebdb | 2011-03-11 20:59:21 +0000 | [diff] [blame] | 1735 | args.add(EmitAnyExprToTemp(E), type); |
Anders Carlsson | 0139bb9 | 2009-04-08 20:47:54 +0000 | [diff] [blame] | 1736 | } |
| 1737 | |
Dan Gohman | b49bd27 | 2012-02-16 00:57:37 +0000 | [diff] [blame] | 1738 | // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC |
| 1739 | // optimizer it can aggressively ignore unwind edges. |
| 1740 | void |
| 1741 | CodeGenFunction::AddObjCARCExceptionMetadata(llvm::Instruction *Inst) { |
| 1742 | if (CGM.getCodeGenOpts().OptimizationLevel != 0 && |
| 1743 | !CGM.getCodeGenOpts().ObjCAutoRefCountExceptions) |
| 1744 | Inst->setMetadata("clang.arc.no_objc_arc_exceptions", |
| 1745 | CGM.getNoObjCARCExceptionsMetadata()); |
| 1746 | } |
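| | // For illustration: at -O1 and above without -fobjc-arc-exceptions, the |
| | // emitted call roughly looks like |
| | //   call void @foo(), !clang.arc.no_objc_arc_exceptions !0 |
| | // and the ARC optimizer treats such calls as if they cannot unwind when |
| | // pairing retain/release operations. |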
| 1747 | |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 1748 | /// Emits a call or invoke instruction to the given function, depending |
| 1749 | /// on the current state of the EH stack. |
| 1750 | llvm::CallSite |
| 1751 | CodeGenFunction::EmitCallOrInvoke(llvm::Value *Callee, |
Chris Lattner | 2d3ba4f | 2011-07-23 17:14:25 +0000 | [diff] [blame] | 1752 | ArrayRef<llvm::Value *> Args, |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1753 | const Twine &Name) { |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 1754 | llvm::BasicBlock *InvokeDest = getInvokeDest(); |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 1755 | |
Dan Gohman | b49bd27 | 2012-02-16 00:57:37 +0000 | [diff] [blame] | 1756 | llvm::Instruction *Inst; |
| 1757 | if (!InvokeDest) |
| 1758 | Inst = Builder.CreateCall(Callee, Args, Name); |
| 1759 | else { |
| 1760 | llvm::BasicBlock *ContBB = createBasicBlock("invoke.cont"); |
| 1761 | Inst = Builder.CreateInvoke(Callee, ContBB, InvokeDest, Args, Name); |
| 1762 | EmitBlock(ContBB); |
| 1763 | } |
| 1764 | |
| 1765 | // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC |
| 1766 | // optimizer it can aggressively ignore unwind edges. |
David Blaikie | 4e4d084 | 2012-03-11 07:00:24 +0000 | [diff] [blame] | 1767 | if (CGM.getLangOpts().ObjCAutoRefCount) |
Dan Gohman | b49bd27 | 2012-02-16 00:57:37 +0000 | [diff] [blame] | 1768 | AddObjCARCExceptionMetadata(Inst); |
| 1769 | |
| 1770 | return Inst; |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 1771 | } |
| 1772 | |
Jay Foad | 4c7d9f1 | 2011-07-15 08:37:34 +0000 | [diff] [blame] | 1773 | llvm::CallSite |
| 1774 | CodeGenFunction::EmitCallOrInvoke(llvm::Value *Callee, |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1775 | const Twine &Name) { |
Chris Lattner | 2d3ba4f | 2011-07-23 17:14:25 +0000 | [diff] [blame] | 1776 | return EmitCallOrInvoke(Callee, ArrayRef<llvm::Value *>(), Name); |
Jay Foad | 4c7d9f1 | 2011-07-15 08:37:34 +0000 | [diff] [blame] | 1777 | } |
| 1778 | |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1779 | static void checkArgMatches(llvm::Value *Elt, unsigned &ArgNo, |
| 1780 | llvm::FunctionType *FTy) { |
| 1781 | if (ArgNo < FTy->getNumParams()) |
| 1782 | assert(Elt->getType() == FTy->getParamType(ArgNo)); |
| 1783 | else |
| 1784 | assert(FTy->isVarArg()); |
| 1785 | ++ArgNo; |
| 1786 | } |
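| | // Note: in NDEBUG builds the asserts above compile away and this helper |
| | // only advances ArgNo; it is a sanity check that each IR value we push |
| | // matches the corresponding parameter type of the callee, or falls into |
| | // the varargs portion of its signature. |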
| 1787 | |
Chris Lattner | 811bf36 | 2011-07-12 06:29:11 +0000 | [diff] [blame] | 1788 | void CodeGenFunction::ExpandTypeToArgs(QualType Ty, RValue RV, |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1789 | SmallVector<llvm::Value*,16> &Args, |
Chris Lattner | 811bf36 | 2011-07-12 06:29:11 +0000 | [diff] [blame] | 1790 | llvm::FunctionType *IRFuncTy) { |
Bob Wilson | 194f06a | 2011-08-03 05:58:22 +0000 | [diff] [blame] | 1791 | if (const ConstantArrayType *AT = getContext().getAsConstantArrayType(Ty)) { |
| 1792 | unsigned NumElts = AT->getSize().getZExtValue(); |
| 1793 | QualType EltTy = AT->getElementType(); |
| 1794 | llvm::Value *Addr = RV.getAggregateAddr(); |
| 1795 | for (unsigned Elt = 0; Elt < NumElts; ++Elt) { |
| 1796 | llvm::Value *EltAddr = Builder.CreateConstGEP2_32(Addr, 0, Elt); |
| 1797 | LValue LV = MakeAddrLValue(EltAddr, EltTy); |
| 1798 | RValue EltRV; |
Eli Friedman | ca3d3fc | 2011-11-15 02:46:03 +0000 | [diff] [blame] | 1799 | if (EltTy->isAnyComplexType()) |
| 1800 | // FIXME: Volatile? |
| 1801 | EltRV = RValue::getComplex(LoadComplexFromAddr(LV.getAddress(), false)); |
| 1802 | else if (CodeGenFunction::hasAggregateLLVMType(EltTy)) |
Eli Friedman | 51f5120 | 2011-12-03 03:08:40 +0000 | [diff] [blame] | 1803 | EltRV = LV.asAggregateRValue(); |
Bob Wilson | 194f06a | 2011-08-03 05:58:22 +0000 | [diff] [blame] | 1804 | else |
| 1805 | EltRV = EmitLoadOfLValue(LV); |
| 1806 | ExpandTypeToArgs(EltTy, EltRV, Args, IRFuncTy); |
Chris Lattner | 811bf36 | 2011-07-12 06:29:11 +0000 | [diff] [blame] | 1807 | } |
Anton Korobeynikov | eaf856d | 2012-04-13 11:22:00 +0000 | [diff] [blame] | 1808 | } else if (const RecordType *RT = Ty->getAs<RecordType>()) { |
Bob Wilson | 194f06a | 2011-08-03 05:58:22 +0000 | [diff] [blame] | 1809 | RecordDecl *RD = RT->getDecl(); |
| 1810 | assert(RV.isAggregate() && "Unexpected rvalue during struct expansion"); |
| 1811 | llvm::Value *Addr = RV.getAggregateAddr(); |
Anton Korobeynikov | eaf856d | 2012-04-13 11:22:00 +0000 | [diff] [blame] | 1812 | |
| 1813 | if (RD->isUnion()) { |
| 1814 | const FieldDecl *LargestFD = 0; |
| 1815 | CharUnits UnionSize = CharUnits::Zero(); |
| 1816 | |
| 1817 | for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end(); |
| 1818 | i != e; ++i) { |
| 1819 | const FieldDecl *FD = *i; |
| 1820 | assert(!FD->isBitField() && |
| 1821 | "Cannot expand structure with bit-field members."); |
| 1822 | CharUnits FieldSize = getContext().getTypeSizeInChars(FD->getType()); |
| 1823 | if (UnionSize < FieldSize) { |
| 1824 | UnionSize = FieldSize; |
| 1825 | LargestFD = FD; |
| 1826 | } |
| 1827 | } |
| 1828 | if (LargestFD) { |
| 1829 | RValue FldRV = EmitRValueForField(Addr, LargestFD); |
| 1830 | ExpandTypeToArgs(LargestFD->getType(), FldRV, Args, IRFuncTy); |
| 1831 | } |
| 1832 | } else { |
| 1833 | for (RecordDecl::field_iterator i = RD->field_begin(), e = RD->field_end(); |
| 1834 | i != e; ++i) { |
| 1835 | FieldDecl *FD = *i; |
| 1836 | |
| 1837 | RValue FldRV = EmitRValueForField(Addr, FD); |
| 1838 | ExpandTypeToArgs(FD->getType(), FldRV, Args, IRFuncTy); |
| 1839 | } |
Bob Wilson | 194f06a | 2011-08-03 05:58:22 +0000 | [diff] [blame] | 1840 | } |
Eli Friedman | ca3d3fc | 2011-11-15 02:46:03 +0000 | [diff] [blame] | 1841 | } else if (Ty->isAnyComplexType()) { |
Bob Wilson | 194f06a | 2011-08-03 05:58:22 +0000 | [diff] [blame] | 1842 | ComplexPairTy CV = RV.getComplexVal(); |
| 1843 | Args.push_back(CV.first); |
| 1844 | Args.push_back(CV.second); |
| 1845 | } else { |
Chris Lattner | 811bf36 | 2011-07-12 06:29:11 +0000 | [diff] [blame] | 1846 | assert(RV.isScalar() && |
| 1847 | "Unexpected non-scalar rvalue during struct expansion."); |
| 1848 | |
| 1849 | // Insert a bitcast as needed. |
| 1850 | llvm::Value *V = RV.getScalarVal(); |
| 1851 | if (Args.size() < IRFuncTy->getNumParams() && |
| 1852 | V->getType() != IRFuncTy->getParamType(Args.size())) |
| 1853 | V = Builder.CreateBitCast(V, IRFuncTy->getParamType(Args.size())); |
| 1854 | |
| 1855 | Args.push_back(V); |
| 1856 | } |
| 1857 | } |
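| | // For illustration: given a hypothetical argument classified as Expand, |
| | //   struct Point { float x; float y; }; |
| | // is not passed as one aggregate; the code above pushes one IR argument |
| | // per field (two floats here), recursing into constant arrays and nested |
| | // structs, using only the largest field of a union, and splitting |
| | // _Complex values into their real and imaginary parts. |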
| 1858 | |
| 1859 | |
Daniel Dunbar | 88b5396 | 2009-02-02 22:03:45 +0000 | [diff] [blame] | 1860 | RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo, |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1861 | llvm::Value *Callee, |
Anders Carlsson | f3c47c9 | 2009-12-24 19:25:24 +0000 | [diff] [blame] | 1862 | ReturnValueSlot ReturnValue, |
Daniel Dunbar | c0ef9f5 | 2009-02-20 18:06:48 +0000 | [diff] [blame] | 1863 | const CallArgList &CallArgs, |
David Chisnall | dd5c98f | 2010-05-01 11:15:56 +0000 | [diff] [blame] | 1864 | const Decl *TargetDecl, |
David Chisnall | 4b02afc | 2010-05-02 13:41:58 +0000 | [diff] [blame] | 1865 | llvm::Instruction **callOrInvoke) { |
Mike Stump | f5408fe | 2009-05-16 07:57:57 +0000 | [diff] [blame] | 1866 | // FIXME: We no longer need the types from CallArgs; lift up and simplify. |
Chris Lattner | 5f9e272 | 2011-07-23 10:55:15 +0000 | [diff] [blame] | 1867 | SmallVector<llvm::Value*, 16> Args; |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 1868 | |
| 1869 | // Handle struct-return functions by passing a pointer to the |
| 1870 | // location that we would like to return into. |
Daniel Dunbar | bb36d33 | 2009-02-02 21:43:58 +0000 | [diff] [blame] | 1871 | QualType RetTy = CallInfo.getReturnType(); |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 1872 | const ABIArgInfo &RetAI = CallInfo.getReturnInfo(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1873 | |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1874 | // IRArgNo - Keep track of the argument number in the callee we're looking at. |
| 1875 | unsigned IRArgNo = 0; |
| 1876 | llvm::FunctionType *IRFuncTy = |
| 1877 | cast<llvm::FunctionType>( |
| 1878 | cast<llvm::PointerType>(Callee->getType())->getElementType()); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1879 | |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 1880 | // If the call returns a temporary with struct return, create a temporary |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 1881 | // alloca to hold the result, unless one is given to us. |
Daniel Dunbar | dacf9dd | 2010-07-14 23:39:36 +0000 | [diff] [blame] | 1882 | if (CGM.ReturnTypeUsesSRet(CallInfo)) { |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 1883 | llvm::Value *Value = ReturnValue.getValue(); |
| 1884 | if (!Value) |
Daniel Dunbar | 195337d | 2010-02-09 02:48:28 +0000 | [diff] [blame] | 1885 | Value = CreateMemTemp(RetTy); |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 1886 | Args.push_back(Value); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1887 | checkArgMatches(Value, IRArgNo, IRFuncTy); |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 1888 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1889 | |
Daniel Dunbar | 4b5f0a4 | 2009-02-04 21:17:21 +0000 | [diff] [blame] | 1890 | assert(CallInfo.arg_size() == CallArgs.size() && |
| 1891 | "Mismatch between function signature & arguments."); |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 1892 | CGFunctionInfo::const_arg_iterator info_it = CallInfo.arg_begin(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1893 | for (CallArgList::const_iterator I = CallArgs.begin(), E = CallArgs.end(); |
Daniel Dunbar | b225be4 | 2009-02-03 05:59:18 +0000 | [diff] [blame] | 1894 | I != E; ++I, ++info_it) { |
| 1895 | const ABIArgInfo &ArgInfo = info_it->info; |
Eli Friedman | c6d0782 | 2011-05-02 18:05:27 +0000 | [diff] [blame] | 1896 | RValue RV = I->RV; |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1897 | |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1898 | unsigned TypeAlign = |
Eli Friedman | c6d0782 | 2011-05-02 18:05:27 +0000 | [diff] [blame] | 1899 | getContext().getTypeAlignInChars(I->Ty).getQuantity(); |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 1900 | switch (ArgInfo.getKind()) { |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1901 | case ABIArgInfo::Indirect: { |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1902 | if (RV.isScalar() || RV.isComplex()) { |
| 1903 | // Make a temporary alloca to pass the argument. |
Eli Friedman | 70cbd2a | 2011-06-15 18:26:32 +0000 | [diff] [blame] | 1904 | llvm::AllocaInst *AI = CreateMemTemp(I->Ty); |
| 1905 | if (ArgInfo.getIndirectAlign() > AI->getAlignment()) |
| 1906 | AI->setAlignment(ArgInfo.getIndirectAlign()); |
| 1907 | Args.push_back(AI); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1908 | |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1909 | if (RV.isScalar()) |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1910 | EmitStoreOfScalar(RV.getScalarVal(), Args.back(), false, |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1911 | TypeAlign, I->Ty); |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1912 | else |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1913 | StoreComplexToAddr(RV.getComplexVal(), Args.back(), false); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1914 | |
| 1915 | // Validate argument match. |
| 1916 | checkArgMatches(AI, IRArgNo, IRFuncTy); |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1917 | } else { |
Eli Friedman | ea5e4da | 2011-06-14 01:37:52 +0000 | [diff] [blame] | 1918 | // We want to avoid creating an unnecessary temporary+copy here; |
| 1919 | // however, we need one in two cases: |
| 1920 | // 1. If the argument is not byval, and we are required to copy the |
| 1921 | // source. (This case doesn't occur on any common architecture.) |
| 1922 | // 2. If the argument is byval, RV is not sufficiently aligned, and |
| 1923 | // we cannot force it to be sufficiently aligned. |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1924 | llvm::Value *Addr = RV.getAggregateAddr(); |
| 1925 | unsigned Align = ArgInfo.getIndirectAlign(); |
| 1926 | const llvm::TargetData *TD = &CGM.getTargetData(); |
| 1927 | if ((!ArgInfo.getIndirectByVal() && I->NeedsCopy) || |
| 1928 | (ArgInfo.getIndirectByVal() && TypeAlign < Align && |
| 1929 | llvm::getOrEnforceKnownAlignment(Addr, Align, TD) < Align)) { |
Eli Friedman | ea5e4da | 2011-06-14 01:37:52 +0000 | [diff] [blame] | 1930 | // Create an aligned temporary, and copy to it. |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1931 | llvm::AllocaInst *AI = CreateMemTemp(I->Ty); |
| 1932 | if (Align > AI->getAlignment()) |
| 1933 | AI->setAlignment(Align); |
Eli Friedman | ea5e4da | 2011-06-14 01:37:52 +0000 | [diff] [blame] | 1934 | Args.push_back(AI); |
Chad Rosier | 649b4a1 | 2012-03-29 17:37:10 +0000 | [diff] [blame] | 1935 | EmitAggregateCopy(AI, Addr, I->Ty, RV.isVolatileQualified()); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1936 | |
| 1937 | // Validate argument match. |
| 1938 | checkArgMatches(AI, IRArgNo, IRFuncTy); |
Eli Friedman | ea5e4da | 2011-06-14 01:37:52 +0000 | [diff] [blame] | 1939 | } else { |
| 1940 | // Skip the extra memcpy call. |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1941 | Args.push_back(Addr); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1942 | |
| 1943 | // Validate argument match. |
| 1944 | checkArgMatches(Addr, IRArgNo, IRFuncTy); |
Eli Friedman | ea5e4da | 2011-06-14 01:37:52 +0000 | [diff] [blame] | 1945 | } |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1946 | } |
| 1947 | break; |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 1948 | } |
Daniel Dunbar | 1f74598 | 2009-02-05 09:16:39 +0000 | [diff] [blame] | 1949 | |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 1950 | case ABIArgInfo::Ignore: |
| 1951 | break; |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1952 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1953 | case ABIArgInfo::Extend: |
| 1954 | case ABIArgInfo::Direct: { |
Akira Hatanaka | f0cc208 | 2012-01-07 00:25:33 +0000 | [diff] [blame] | 1955 | // Insert a padding argument to ensure proper alignment. |
| 1956 | if (llvm::Type *PaddingType = ArgInfo.getPaddingType()) { |
| 1957 | Args.push_back(llvm::UndefValue::get(PaddingType)); |
| 1958 | ++IRArgNo; |
| 1959 | } |
| 1960 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1961 | if (!isa<llvm::StructType>(ArgInfo.getCoerceToType()) && |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1962 | ArgInfo.getCoerceToType() == ConvertType(info_it->type) && |
| 1963 | ArgInfo.getDirectOffset() == 0) { |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1964 | llvm::Value *V; |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1965 | if (RV.isScalar()) |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1966 | V = RV.getScalarVal(); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1967 | else |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1968 | V = Builder.CreateLoad(RV.getAggregateAddr()); |
| 1969 | |
Chris Lattner | 21ca1fd | 2011-07-12 04:53:39 +0000 | [diff] [blame] | 1970 | // If the argument doesn't match, perform a bitcast to coerce it. This |
| 1971 | // can happen due to trivial type mismatches. |
| 1972 | if (IRArgNo < IRFuncTy->getNumParams() && |
| 1973 | V->getType() != IRFuncTy->getParamType(IRArgNo)) |
| 1974 | V = Builder.CreateBitCast(V, IRFuncTy->getParamType(IRArgNo)); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1975 | Args.push_back(V); |
| 1976 | |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 1977 | checkArgMatches(V, IRArgNo, IRFuncTy); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 1978 | break; |
| 1979 | } |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 1980 | |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1981 | // FIXME: Avoid the conversion through memory if possible. |
| 1982 | llvm::Value *SrcPtr; |
| 1983 | if (RV.isScalar()) { |
Eli Friedman | c6d0782 | 2011-05-02 18:05:27 +0000 | [diff] [blame] | 1984 | SrcPtr = CreateMemTemp(I->Ty, "coerce"); |
Eli Friedman | 97cb5a4 | 2011-06-15 22:09:18 +0000 | [diff] [blame] | 1985 | EmitStoreOfScalar(RV.getScalarVal(), SrcPtr, false, TypeAlign, I->Ty); |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1986 | } else if (RV.isComplex()) { |
Eli Friedman | c6d0782 | 2011-05-02 18:05:27 +0000 | [diff] [blame] | 1987 | SrcPtr = CreateMemTemp(I->Ty, "coerce"); |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1988 | StoreComplexToAddr(RV.getComplexVal(), SrcPtr, false); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 1989 | } else |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 1990 | SrcPtr = RV.getAggregateAddr(); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1991 | |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1992 | // If the value is offset in memory, apply the offset now. |
| 1993 | if (unsigned Offs = ArgInfo.getDirectOffset()) { |
| 1994 | SrcPtr = Builder.CreateBitCast(SrcPtr, Builder.getInt8PtrTy()); |
| 1995 | SrcPtr = Builder.CreateConstGEP1_32(SrcPtr, Offs); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 1996 | SrcPtr = Builder.CreateBitCast(SrcPtr, |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 1997 | llvm::PointerType::getUnqual(ArgInfo.getCoerceToType())); |
| 1998 | |
| 1999 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2000 | |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 2001 | // If the coerce-to type is a first class aggregate, we flatten it and |
| 2002 | // pass the elements. Either way is semantically identical, but fast-isel |
| 2003 | // and the optimizer generally like scalar values better than FCAs. |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 2004 | if (llvm::StructType *STy = |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 2005 | dyn_cast<llvm::StructType>(ArgInfo.getCoerceToType())) { |
Chris Lattner | 9282688 | 2010-07-05 20:41:41 +0000 | [diff] [blame] | 2006 | SrcPtr = Builder.CreateBitCast(SrcPtr, |
| 2007 | llvm::PointerType::getUnqual(STy)); |
| 2008 | for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) { |
| 2009 | llvm::Value *EltPtr = Builder.CreateConstGEP2_32(SrcPtr, 0, i); |
Chris Lattner | deabde2 | 2010-07-28 18:24:28 +0000 | [diff] [blame] | 2010 | llvm::LoadInst *LI = Builder.CreateLoad(EltPtr); |
| 2011 | // We don't know what we're loading from. |
| 2012 | LI->setAlignment(1); |
| 2013 | Args.push_back(LI); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 2014 | |
| 2015 | // Validate argument match. |
| 2016 | checkArgMatches(LI, IRArgNo, IRFuncTy); |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 2017 | } |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 2018 | } else { |
Chris Lattner | 309c59f | 2010-06-29 00:06:42 +0000 | [diff] [blame] | 2019 | // In the simple case, just pass the coerced loaded value. |
| 2020 | Args.push_back(CreateCoercedLoad(SrcPtr, ArgInfo.getCoerceToType(), |
| 2021 | *this)); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 2022 | |
| 2023 | // Validate argument match. |
| 2024 | checkArgMatches(Args.back(), IRArgNo, IRFuncTy); |
Chris Lattner | ce70016 | 2010-06-28 23:44:11 +0000 | [diff] [blame] | 2025 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2026 | |
Daniel Dunbar | 89c9d8e | 2009-02-03 19:12:28 +0000 | [diff] [blame] | 2027 | break; |
| 2028 | } |
| 2029 | |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 2030 | case ABIArgInfo::Expand: |
Chris Lattner | 811bf36 | 2011-07-12 06:29:11 +0000 | [diff] [blame] | 2031 | ExpandTypeToArgs(I->Ty, RV, Args, IRFuncTy); |
Chris Lattner | 7085544 | 2011-07-12 04:46:18 +0000 | [diff] [blame] | 2032 | IRArgNo = Args.size(); |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 2033 | break; |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 2034 | } |
| 2035 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2036 | |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 2037 | // If the callee is a bitcast of a function to a varargs pointer to function |
| 2038 | // type, check to see if we can remove the bitcast. This handles some cases |
| 2039 | // with unprototyped functions. |
| 2040 | if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(Callee)) |
| 2041 | if (llvm::Function *CalleeF = dyn_cast<llvm::Function>(CE->getOperand(0))) { |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 2042 | llvm::PointerType *CurPT = cast<llvm::PointerType>(Callee->getType()); |
| 2043 | llvm::FunctionType *CurFT = |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 2044 | cast<llvm::FunctionType>(CurPT->getElementType()); |
Chris Lattner | 2acc6e3 | 2011-07-18 04:24:23 +0000 | [diff] [blame] | 2045 | llvm::FunctionType *ActualFT = CalleeF->getFunctionType(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2046 | |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 2047 | if (CE->getOpcode() == llvm::Instruction::BitCast && |
| 2048 | ActualFT->getReturnType() == CurFT->getReturnType() && |
Chris Lattner | d6bebbf | 2009-06-23 01:38:41 +0000 | [diff] [blame] | 2049 | ActualFT->getNumParams() == CurFT->getNumParams() && |
Fariborz Jahanian | c0ddef2 | 2011-03-01 17:28:13 +0000 | [diff] [blame] | 2050 | ActualFT->getNumParams() == Args.size() && |
| 2051 | (CurFT->isVarArg() || !ActualFT->isVarArg())) { |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 2052 | bool ArgsMatch = true; |
| 2053 | for (unsigned i = 0, e = ActualFT->getNumParams(); i != e; ++i) |
| 2054 | if (ActualFT->getParamType(i) != CurFT->getParamType(i)) { |
| 2055 | ArgsMatch = false; |
| 2056 | break; |
| 2057 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2058 | |
Chris Lattner | 5db7ae5 | 2009-06-13 00:26:38 +0000 | [diff] [blame] | 2059 | // Strip the cast if we can get away with it. This is a nice cleanup, |
| 2060 | // but also allows us to inline the function at -O0 if it is marked |
| 2061 | // always_inline. |
| 2062 | if (ArgsMatch) |
| 2063 | Callee = CalleeF; |
| 2064 | } |
| 2065 | } |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2066 | |
Daniel Dunbar | ca6408c | 2009-09-12 00:59:20 +0000 | [diff] [blame] | 2067 | unsigned CallingConv; |
Devang Patel | 761d7f7 | 2008-09-25 21:02:23 +0000 | [diff] [blame] | 2068 | CodeGen::AttributeListType AttributeList; |
Daniel Dunbar | ca6408c | 2009-09-12 00:59:20 +0000 | [diff] [blame] | 2069 | CGM.ConstructAttributeList(CallInfo, TargetDecl, AttributeList, CallingConv); |
Daniel Dunbar | 9834ffb | 2009-02-23 17:26:39 +0000 | [diff] [blame] | 2070 | llvm::AttrListPtr Attrs = llvm::AttrListPtr::get(AttributeList.begin(), |
| 2071 | AttributeList.end()); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2072 | |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 2073 | llvm::BasicBlock *InvokeDest = 0; |
| 2074 | if (!(Attrs.getFnAttributes() & llvm::Attribute::NoUnwind)) |
| 2075 | InvokeDest = getInvokeDest(); |
| 2076 | |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2077 | llvm::CallSite CS; |
John McCall | f1549f6 | 2010-07-06 01:34:17 +0000 | [diff] [blame] | 2078 | if (!InvokeDest) { |
Jay Foad | 4c7d9f1 | 2011-07-15 08:37:34 +0000 | [diff] [blame] | 2079 | CS = Builder.CreateCall(Callee, Args); |
Daniel Dunbar | 9834ffb | 2009-02-23 17:26:39 +0000 | [diff] [blame] | 2080 | } else { |
| 2081 | llvm::BasicBlock *Cont = createBasicBlock("invoke.cont"); |
Jay Foad | 4c7d9f1 | 2011-07-15 08:37:34 +0000 | [diff] [blame] | 2082 | CS = Builder.CreateInvoke(Callee, Cont, InvokeDest, Args); |
Daniel Dunbar | 9834ffb | 2009-02-23 17:26:39 +0000 | [diff] [blame] | 2083 | EmitBlock(Cont); |
Daniel Dunbar | f4fe0f0 | 2009-02-20 18:54:31 +0000 | [diff] [blame] | 2084 | } |
Chris Lattner | ce93399 | 2010-06-29 16:40:28 +0000 | [diff] [blame] | 2085 | if (callOrInvoke) |
David Chisnall | 4b02afc | 2010-05-02 13:41:58 +0000 | [diff] [blame] | 2086 | *callOrInvoke = CS.getInstruction(); |
Daniel Dunbar | f4fe0f0 | 2009-02-20 18:54:31 +0000 | [diff] [blame] | 2087 | |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2088 | CS.setAttributes(Attrs); |
Daniel Dunbar | ca6408c | 2009-09-12 00:59:20 +0000 | [diff] [blame] | 2089 | CS.setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv)); |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2090 | |
Dan Gohman | b49bd27 | 2012-02-16 00:57:37 +0000 | [diff] [blame] | 2091 | // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC |
| 2092 | // optimizer it can aggressively ignore unwind edges. |
David Blaikie | 4e4d084 | 2012-03-11 07:00:24 +0000 | [diff] [blame] | 2093 | if (CGM.getLangOpts().ObjCAutoRefCount) |
Dan Gohman | b49bd27 | 2012-02-16 00:57:37 +0000 | [diff] [blame] | 2094 | AddObjCARCExceptionMetadata(CS.getInstruction()); |
| 2095 | |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2096 | // If the call doesn't return, finish the basic block and clear the |
| 2097 | // insertion point; this allows the rest of IRgen to discard |
| 2098 | // unreachable code. |
| 2099 | if (CS.doesNotReturn()) { |
| 2100 | Builder.CreateUnreachable(); |
| 2101 | Builder.ClearInsertionPoint(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2102 | |
Mike Stump | f5408fe | 2009-05-16 07:57:57 +0000 | [diff] [blame] | 2103 | // FIXME: For now, emit a dummy basic block because expr emitters in |
| 2104 | // general are not ready to handle emitting expressions at unreachable |
| 2105 | // points. |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2106 | EnsureInsertPoint(); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2107 | |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2108 | // Return a reasonable RValue. |
| 2109 | return GetUndefRValue(RetTy); |
Mike Stump | 1eb4433 | 2009-09-09 15:08:12 +0000 | [diff] [blame] | 2110 | } |
Daniel Dunbar | d14151d | 2009-03-02 04:32:35 +0000 | [diff] [blame] | 2111 | |
| 2112 | llvm::Instruction *CI = CS.getInstruction(); |
Benjamin Kramer | ffbb15e | 2009-10-05 13:47:21 +0000 | [diff] [blame] | 2113 | if (Builder.isNamePreserving() && !CI->getType()->isVoidTy()) |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 2114 | CI->setName("call"); |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 2115 | |
John McCall | f85e193 | 2011-06-15 23:02:42 +0000 | [diff] [blame] | 2116 | // Emit any writebacks immediately. Arguably this should happen |
| 2117 | // after any return-value munging. |
| 2118 | if (CallArgs.hasWritebacks()) |
| 2119 | emitWritebacks(*this, CallArgs); |
| 2120 | |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 2121 | switch (RetAI.getKind()) { |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 2122 | case ABIArgInfo::Indirect: { |
| 2123 | unsigned Alignment = getContext().getTypeAlignInChars(RetTy).getQuantity(); |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 2124 | if (RetTy->isAnyComplexType()) |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 2125 | return RValue::getComplex(LoadComplexFromAddr(Args[0], false)); |
Chris Lattner | 3403084 | 2009-03-22 00:32:22 +0000 | [diff] [blame] | 2126 | if (CodeGenFunction::hasAggregateLLVMType(RetTy)) |
Daniel Dunbar | 5627377 | 2008-09-17 00:51:38 +0000 | [diff] [blame] | 2127 | return RValue::getAggregate(Args[0]); |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 2128 | return RValue::get(EmitLoadOfScalar(Args[0], false, Alignment, RetTy)); |
| 2129 | } |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 2130 | |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 2131 | case ABIArgInfo::Ignore: |
Daniel Dunbar | 0bcc521 | 2009-02-03 06:30:17 +0000 | [diff] [blame] | 2132 | // If we are ignoring an argument that had a result, make sure to |
| 2133 | // construct the appropriate return value for our caller. |
Daniel Dunbar | 13e8173 | 2009-02-05 07:09:07 +0000 | [diff] [blame] | 2134 | return GetUndefRValue(RetTy); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2135 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 2136 | case ABIArgInfo::Extend: |
| 2137 | case ABIArgInfo::Direct: { |
Chris Lattner | 6af13f3 | 2011-07-13 03:59:32 +0000 | [diff] [blame] | 2138 | llvm::Type *RetIRTy = ConvertType(RetTy); |
| 2139 | if (RetAI.getCoerceToType() == RetIRTy && RetAI.getDirectOffset() == 0) { |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 2140 | if (RetTy->isAnyComplexType()) { |
| 2141 | llvm::Value *Real = Builder.CreateExtractValue(CI, 0); |
| 2142 | llvm::Value *Imag = Builder.CreateExtractValue(CI, 1); |
| 2143 | return RValue::getComplex(std::make_pair(Real, Imag)); |
| 2144 | } |
| 2145 | if (CodeGenFunction::hasAggregateLLVMType(RetTy)) { |
| 2146 | llvm::Value *DestPtr = ReturnValue.getValue(); |
| 2147 | bool DestIsVolatile = ReturnValue.isVolatile(); |
Daniel Dunbar | 1143492 | 2009-01-26 21:26:08 +0000 | [diff] [blame] | 2148 | |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 2149 | if (!DestPtr) { |
| 2150 | DestPtr = CreateMemTemp(RetTy, "agg.tmp"); |
| 2151 | DestIsVolatile = false; |
| 2152 | } |
Eli Friedman | badea57 | 2011-05-17 21:08:01 +0000 | [diff] [blame] | 2153 | BuildAggStore(*this, CI, DestPtr, DestIsVolatile, false); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 2154 | return RValue::getAggregate(DestPtr); |
| 2155 | } |
Chris Lattner | 6af13f3 | 2011-07-13 03:59:32 +0000 | [diff] [blame] | 2156 | |
| 2157 | // If the argument doesn't match, perform a bitcast to coerce it. This |
| 2158 | // can happen due to trivial type mismatches. |
| 2159 | llvm::Value *V = CI; |
| 2160 | if (V->getType() != RetIRTy) |
| 2161 | V = Builder.CreateBitCast(V, RetIRTy); |
| 2162 | return RValue::get(V); |
Chris Lattner | 800588f | 2010-07-29 06:26:06 +0000 | [diff] [blame] | 2163 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2164 | |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 2165 | llvm::Value *DestPtr = ReturnValue.getValue(); |
| 2166 | bool DestIsVolatile = ReturnValue.isVolatile(); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2167 | |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 2168 | if (!DestPtr) { |
Daniel Dunbar | 195337d | 2010-02-09 02:48:28 +0000 | [diff] [blame] | 2169 | DestPtr = CreateMemTemp(RetTy, "coerce"); |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 2170 | DestIsVolatile = false; |
| 2171 | } |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2172 | |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 2173 | // If the value is offset in memory, apply the offset now. |
| 2174 | llvm::Value *StorePtr = DestPtr; |
| 2175 | if (unsigned Offs = RetAI.getDirectOffset()) { |
| 2176 | StorePtr = Builder.CreateBitCast(StorePtr, Builder.getInt8PtrTy()); |
| 2177 | StorePtr = Builder.CreateConstGEP1_32(StorePtr, Offs); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2178 | StorePtr = Builder.CreateBitCast(StorePtr, |
Chris Lattner | 117e3f4 | 2010-07-30 04:02:24 +0000 | [diff] [blame] | 2179 | llvm::PointerType::getUnqual(RetAI.getCoerceToType())); |
| 2180 | } |
| 2181 | CreateCoercedStore(CI, StorePtr, DestIsVolatile, *this); |
Michael J. Spencer | 9cac494 | 2010-10-19 06:39:39 +0000 | [diff] [blame] | 2182 | |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 2183 | unsigned Alignment = getContext().getTypeAlignInChars(RetTy).getQuantity(); |
Anders Carlsson | ad3d691 | 2008-11-25 22:21:48 +0000 | [diff] [blame] | 2184 | if (RetTy->isAnyComplexType()) |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 2185 | return RValue::getComplex(LoadComplexFromAddr(DestPtr, false)); |
Chris Lattner | 3403084 | 2009-03-22 00:32:22 +0000 | [diff] [blame] | 2186 | if (CodeGenFunction::hasAggregateLLVMType(RetTy)) |
Anders Carlsson | d2490a9 | 2009-12-24 20:40:36 +0000 | [diff] [blame] | 2187 | return RValue::getAggregate(DestPtr); |
Daniel Dunbar | 91a16fa | 2010-08-21 02:24:36 +0000 | [diff] [blame] | 2188 | return RValue::get(EmitLoadOfScalar(DestPtr, false, Alignment, RetTy)); |
Daniel Dunbar | 639ffe4 | 2008-09-10 07:04:09 +0000 | [diff] [blame] | 2189 | } |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 2190 | |
Daniel Dunbar | 8951dbd | 2008-09-11 01:48:57 +0000 | [diff] [blame] | 2191 | case ABIArgInfo::Expand: |
David Blaikie | b219cfc | 2011-09-23 05:06:16 +0000 | [diff] [blame] | 2192 | llvm_unreachable("Invalid ABI kind for return argument"); |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 2193 | } |
Daniel Dunbar | 2c8e0f3 | 2008-09-10 02:41:04 +0000 | [diff] [blame] | 2194 | |
David Blaikie | b219cfc | 2011-09-23 05:06:16 +0000 | [diff] [blame] | 2195 | llvm_unreachable("Unhandled ABIArgInfo::Kind"); |
Daniel Dunbar | 17b708d | 2008-09-09 23:27:19 +0000 | [diff] [blame] | 2196 | } |
Daniel Dunbar | b4094ea | 2009-02-10 20:44:09 +0000 | [diff] [blame] | 2197 | |
| 2198 | /* VarArg handling */ |
| 2199 | |
| 2200 | llvm::Value *CodeGenFunction::EmitVAArg(llvm::Value *VAListAddr, QualType Ty) { |
| 2201 | return CGM.getTypes().getABIInfo().EmitVAArg(VAListAddr, Ty, *this); |
| 2202 | } |
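| | // For illustration: a use such as |
| | //   int n = __builtin_va_arg(ap, int); |
| | // reaches this hook, which simply delegates to the target's ABIInfo; the |
| | // target-specific EmitVAArg() returns the address from which the argument |
| | // value can then be loaded. |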