Nick Lewycky5fa40c32013-10-01 21:51:38 +00001//===--- CGCall.cpp - Encapsulate calling convention details --------------===//
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +00002//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// These classes wrap the information about a call or function
11// definition used to handle ABI compliance.
12//
13//===----------------------------------------------------------------------===//
14
15#include "CGCall.h"
Chris Lattnere70a0072010-06-29 16:40:28 +000016#include "ABIInfo.h"
Chandler Carruth3a022472012-12-04 09:13:33 +000017#include "CGCXXABI.h"
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +000018#include "CodeGenFunction.h"
Daniel Dunbarc68897d2008-09-10 00:41:16 +000019#include "CodeGenModule.h"
John McCalla729c622012-02-17 03:33:10 +000020#include "TargetInfo.h"
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +000021#include "clang/AST/Decl.h"
Anders Carlssonb15b55c2009-04-03 22:48:58 +000022#include "clang/AST/DeclCXX.h"
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +000023#include "clang/AST/DeclObjC.h"
Chandler Carruth3a022472012-12-04 09:13:33 +000024#include "clang/Basic/TargetInfo.h"
Mark Laceya8e7df32013-10-30 21:53:58 +000025#include "clang/CodeGen/CGFunctionInfo.h"
Chandler Carruth85098242010-06-15 23:19:56 +000026#include "clang/Frontend/CodeGenOptions.h"
Bill Wendling706469b2013-02-28 22:49:57 +000027#include "llvm/ADT/StringExtras.h"
Chandler Carruthffd55512013-01-02 11:45:17 +000028#include "llvm/IR/Attributes.h"
Chandler Carruthc80ceea2014-03-04 11:02:08 +000029#include "llvm/IR/CallSite.h"
Chandler Carruthffd55512013-01-02 11:45:17 +000030#include "llvm/IR/DataLayout.h"
31#include "llvm/IR/InlineAsm.h"
Reid Kleckner314ef7b2014-02-01 00:04:45 +000032#include "llvm/IR/Intrinsics.h"
David Majnemerdc012fa2015-04-22 21:38:15 +000033#include "llvm/IR/IntrinsicInst.h"
Eli Friedmanf7456192011-06-15 22:09:18 +000034#include "llvm/Transforms/Utils/Local.h"
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +000035using namespace clang;
36using namespace CodeGen;
37
38/***/
39
John McCallab26cfa2010-02-05 21:31:56 +000040static unsigned ClangCallConvToLLVMCallConv(CallingConv CC) {
41 switch (CC) {
42 default: return llvm::CallingConv::C;
43 case CC_X86StdCall: return llvm::CallingConv::X86_StdCall;
44 case CC_X86FastCall: return llvm::CallingConv::X86_FastCall;
Douglas Gregora941dca2010-05-18 16:57:00 +000045 case CC_X86ThisCall: return llvm::CallingConv::X86_ThisCall;
Charles Davisb5a214e2013-08-30 04:39:01 +000046 case CC_X86_64Win64: return llvm::CallingConv::X86_64_Win64;
47 case CC_X86_64SysV: return llvm::CallingConv::X86_64_SysV;
Anton Korobeynikov231e8752011-04-14 20:06:49 +000048 case CC_AAPCS: return llvm::CallingConv::ARM_AAPCS;
49 case CC_AAPCS_VFP: return llvm::CallingConv::ARM_AAPCS_VFP;
Guy Benyeif0a014b2012-12-25 08:53:55 +000050 case CC_IntelOclBicc: return llvm::CallingConv::Intel_OCL_BI;
Reid Klecknerd7857f02014-10-24 17:42:17 +000051 // TODO: Add support for __pascal to LLVM.
52 case CC_X86Pascal: return llvm::CallingConv::C;
53 // __vectorcall is supported natively by LLVM (X86_VectorCall).
Reid Kleckner80944df2014-10-31 22:00:51 +000054 case CC_X86VectorCall: return llvm::CallingConv::X86_VectorCall;
Alexander Kornienko21de0ae2015-01-20 11:20:41 +000055 case CC_SpirFunction: return llvm::CallingConv::SPIR_FUNC;
56 case CC_SpirKernel: return llvm::CallingConv::SPIR_KERNEL;
John McCallab26cfa2010-02-05 21:31:56 +000057 }
58}
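// For illustration (assuming an x86 target), the mapping above is what turns a
// source-level convention into the convention spelled out in the emitted IR:
//
//   void __attribute__((fastcall)) f(int);  // CC_X86FastCall
//     -> llvm::CallingConv::X86_FastCall    // "x86_fastcallcc" in the IR
//   void __vectorcall g(double);            // CC_X86VectorCall
//     -> llvm::CallingConv::X86_VectorCall  // "x86_vectorcallcc" in the IR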
59
John McCall8ee376f2010-02-24 07:14:12 +000060/// Derives the 'this' type for codegen purposes, i.e. ignoring method
61/// qualification.
62/// FIXME: address space qualification?
John McCall2da83a32010-02-26 00:48:12 +000063static CanQualType GetThisType(ASTContext &Context, const CXXRecordDecl *RD) {
64 QualType RecTy = Context.getTagDeclType(RD)->getCanonicalTypeInternal();
65 return Context.getPointerType(CanQualType::CreateUnsafe(RecTy));
Daniel Dunbar7a95ca32008-09-10 04:01:49 +000066}
67
John McCall8ee376f2010-02-24 07:14:12 +000068/// Returns the canonical formal type of the given C++ method.
John McCall2da83a32010-02-26 00:48:12 +000069static CanQual<FunctionProtoType> GetFormalType(const CXXMethodDecl *MD) {
70 return MD->getType()->getCanonicalTypeUnqualified()
71 .getAs<FunctionProtoType>();
John McCall8ee376f2010-02-24 07:14:12 +000072}
73
74/// Returns the "extra-canonicalized" return type, which discards
75/// qualifiers on the return type. Codegen doesn't care about them,
76/// and it makes ABI code a little easier to be able to assume that
77/// all parameter and return types are top-level unqualified.
John McCall2da83a32010-02-26 00:48:12 +000078static CanQualType GetReturnType(QualType RetTy) {
79 return RetTy->getCanonicalTypeUnqualified().getUnqualifiedType();
John McCall8ee376f2010-02-24 07:14:12 +000080}
81
John McCall8dda7b22012-07-07 06:41:13 +000082/// Arrange the argument and result information for a value of the given
83/// unprototyped freestanding function type.
John McCall8ee376f2010-02-24 07:14:12 +000084const CGFunctionInfo &
John McCall8dda7b22012-07-07 06:41:13 +000085CodeGenTypes::arrangeFreeFunctionType(CanQual<FunctionNoProtoType> FTNP) {
John McCalla729c622012-02-17 03:33:10 +000086 // When translating an unprototyped function type, always use a
87 // variadic type.
Alp Toker314cc812014-01-25 16:55:45 +000088 return arrangeLLVMFunctionInfo(FTNP->getReturnType().getUnqualifiedType(),
Peter Collingbournef7706832014-12-12 23:41:25 +000089 /*instanceMethod=*/false,
90 /*chainCall=*/false, None,
91 FTNP->getExtInfo(), RequiredArgs(0));
John McCall8ee376f2010-02-24 07:14:12 +000092}
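// For illustration: for a K&R-style declaration such as
//
//   int f();   // no prototype
//
// the type is arranged as if it were "int f(...)" with zero required
// arguments (hence RequiredArgs(0) above); the arguments of any particular
// call are accounted for later, by the call-arrangement code further down in
// this file.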
93
John McCall8dda7b22012-07-07 06:41:13 +000094/// Arrange the LLVM function layout for a value of the given function
Alexey Samsonove5ef3ca2014-08-13 23:55:54 +000095/// type, on top of any implicit parameters already stored.
96static const CGFunctionInfo &
Peter Collingbournef7706832014-12-12 23:41:25 +000097arrangeLLVMFunctionInfo(CodeGenTypes &CGT, bool instanceMethod,
Alexey Samsonove5ef3ca2014-08-13 23:55:54 +000098 SmallVectorImpl<CanQualType> &prefix,
99 CanQual<FunctionProtoType> FTP) {
John McCall8dda7b22012-07-07 06:41:13 +0000100 RequiredArgs required = RequiredArgs::forPrototypePlus(FTP, prefix.size());
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000101 // FIXME: Kill copy.
Benjamin Kramerf9890422015-02-17 16:48:30 +0000102 prefix.append(FTP->param_type_begin(), FTP->param_type_end());
Alp Toker314cc812014-01-25 16:55:45 +0000103 CanQualType resultType = FTP->getReturnType().getUnqualifiedType();
Peter Collingbournef7706832014-12-12 23:41:25 +0000104 return CGT.arrangeLLVMFunctionInfo(resultType, instanceMethod,
105 /*chainCall=*/false, prefix,
Alexey Samsonove5ef3ca2014-08-13 23:55:54 +0000106 FTP->getExtInfo(), required);
John McCall8ee376f2010-02-24 07:14:12 +0000107}
108
John McCalla729c622012-02-17 03:33:10 +0000109/// Arrange the argument and result information for a value of the
John McCall8dda7b22012-07-07 06:41:13 +0000110/// given freestanding function type.
John McCall8ee376f2010-02-24 07:14:12 +0000111const CGFunctionInfo &
John McCall8dda7b22012-07-07 06:41:13 +0000112CodeGenTypes::arrangeFreeFunctionType(CanQual<FunctionProtoType> FTP) {
John McCalla729c622012-02-17 03:33:10 +0000113 SmallVector<CanQualType, 16> argTypes;
Peter Collingbournef7706832014-12-12 23:41:25 +0000114 return ::arrangeLLVMFunctionInfo(*this, /*instanceMethod=*/false, argTypes,
115 FTP);
Daniel Dunbar7feafc72009-09-11 22:24:53 +0000116}
117
Aaron Ballman0362a6d2013-12-18 16:23:37 +0000118static CallingConv getCallingConventionForDecl(const Decl *D, bool IsWindows) {
Daniel Dunbar7feafc72009-09-11 22:24:53 +0000119 // Set the appropriate calling convention for the Function.
120 if (D->hasAttr<StdCallAttr>())
John McCallab26cfa2010-02-05 21:31:56 +0000121 return CC_X86StdCall;
Daniel Dunbar7feafc72009-09-11 22:24:53 +0000122
123 if (D->hasAttr<FastCallAttr>())
John McCallab26cfa2010-02-05 21:31:56 +0000124 return CC_X86FastCall;
Daniel Dunbar7feafc72009-09-11 22:24:53 +0000125
Douglas Gregora941dca2010-05-18 16:57:00 +0000126 if (D->hasAttr<ThisCallAttr>())
127 return CC_X86ThisCall;
128
Reid Klecknerd7857f02014-10-24 17:42:17 +0000129 if (D->hasAttr<VectorCallAttr>())
130 return CC_X86VectorCall;
131
Dawn Perchik335e16b2010-09-03 01:29:35 +0000132 if (D->hasAttr<PascalAttr>())
133 return CC_X86Pascal;
134
Anton Korobeynikov231e8752011-04-14 20:06:49 +0000135 if (PcsAttr *PCS = D->getAttr<PcsAttr>())
136 return (PCS->getPCS() == PcsAttr::AAPCS ? CC_AAPCS : CC_AAPCS_VFP);
137
Guy Benyeif0a014b2012-12-25 08:53:55 +0000138 if (D->hasAttr<IntelOclBiccAttr>())
139 return CC_IntelOclBicc;
140
Aaron Ballman0362a6d2013-12-18 16:23:37 +0000141 if (D->hasAttr<MSABIAttr>())
142 return IsWindows ? CC_C : CC_X86_64Win64;
143
144 if (D->hasAttr<SysVABIAttr>())
145 return IsWindows ? CC_X86_64SysV : CC_C;
146
John McCallab26cfa2010-02-05 21:31:56 +0000147 return CC_C;
Daniel Dunbar7a95ca32008-09-10 04:01:49 +0000148}
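// For illustration (assuming an x86-64 target): a declaration such as
//
//   void __attribute__((ms_abi)) f(void);
//
// yields CC_X86_64Win64 when targeting a non-Windows OS and plain CC_C on
// Windows, where the MS convention is already the default; sysv_abi is the
// mirror image of this.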
149
John McCalla729c622012-02-17 03:33:10 +0000150/// Arrange the argument and result information for a call to an
151/// unknown C++ non-static member function of the given abstract type.
Timur Iskhodzhanov88fd4392013-08-21 06:25:03 +0000152/// (If RD is null, we don't have any meaningful "this" argument type,
153/// so we fall back to a generic pointer type.)
John McCalla729c622012-02-17 03:33:10 +0000154/// The member function must be an ordinary function, i.e. not a
155/// constructor or destructor.
156const CGFunctionInfo &
157CodeGenTypes::arrangeCXXMethodType(const CXXRecordDecl *RD,
158 const FunctionProtoType *FTP) {
159 SmallVector<CanQualType, 16> argTypes;
John McCall8ee376f2010-02-24 07:14:12 +0000160
Anders Carlsson2ee3c012009-10-03 19:43:08 +0000161 // Add the 'this' pointer.
Timur Iskhodzhanov88fd4392013-08-21 06:25:03 +0000162 if (RD)
163 argTypes.push_back(GetThisType(Context, RD));
164 else
165 argTypes.push_back(Context.VoidPtrTy);
John McCall8ee376f2010-02-24 07:14:12 +0000166
Alexey Samsonove5ef3ca2014-08-13 23:55:54 +0000167 return ::arrangeLLVMFunctionInfo(
168 *this, true, argTypes,
169 FTP->getCanonicalTypeUnqualified().getAs<FunctionProtoType>());
Anders Carlsson2ee3c012009-10-03 19:43:08 +0000170}
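// For illustration: for a method such as
//
//   struct A { int f(int) const; };
//
// the arranged signature is effectively (A*, int) -> int; the implicit 'this'
// pointer is simply prepended to the formal parameter list above, and the
// method's cv-qualification is ignored (see GetThisType).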
171
John McCalla729c622012-02-17 03:33:10 +0000172/// Arrange the argument and result information for a declaration or
173/// definition of the given C++ non-static member function. The
174/// member function must be an ordinary function, i.e. not a
175/// constructor or destructor.
176const CGFunctionInfo &
177CodeGenTypes::arrangeCXXMethodDeclaration(const CXXMethodDecl *MD) {
Benjamin Kramer60509af2013-09-09 14:48:42 +0000178 assert(!isa<CXXConstructorDecl>(MD) && "wrong method for constructors!");
John McCall0d635f52010-09-03 01:26:39 +0000179 assert(!isa<CXXDestructorDecl>(MD) && "wrong method for destructors!");
180
John McCalla729c622012-02-17 03:33:10 +0000181 CanQual<FunctionProtoType> prototype = GetFormalType(MD);
Mike Stump11289f42009-09-09 15:08:12 +0000182
John McCalla729c622012-02-17 03:33:10 +0000183 if (MD->isInstance()) {
184 // The abstract case is perfectly fine.
Mark Lacey5ea993b2013-10-02 20:35:23 +0000185 const CXXRecordDecl *ThisType = TheCXXABI.getThisArgumentTypeForMethod(MD);
Timur Iskhodzhanov88fd4392013-08-21 06:25:03 +0000186 return arrangeCXXMethodType(ThisType, prototype.getTypePtr());
John McCalla729c622012-02-17 03:33:10 +0000187 }
188
John McCall8dda7b22012-07-07 06:41:13 +0000189 return arrangeFreeFunctionType(prototype);
Anders Carlssonb15b55c2009-04-03 22:48:58 +0000190}
191
John McCalla729c622012-02-17 03:33:10 +0000192const CGFunctionInfo &
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000193CodeGenTypes::arrangeCXXStructorDeclaration(const CXXMethodDecl *MD,
194 StructorType Type) {
195
John McCalla729c622012-02-17 03:33:10 +0000196 SmallVector<CanQualType, 16> argTypes;
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000197 argTypes.push_back(GetThisType(Context, MD->getParent()));
Stephen Lin9dc6eef2013-06-30 20:40:16 +0000198
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000199 GlobalDecl GD;
200 if (auto *CD = dyn_cast<CXXConstructorDecl>(MD)) {
201 GD = GlobalDecl(CD, toCXXCtorType(Type));
202 } else {
203 auto *DD = dyn_cast<CXXDestructorDecl>(MD);
204 GD = GlobalDecl(DD, toCXXDtorType(Type));
205 }
Anders Carlsson82ba57c2009-11-25 03:15:49 +0000206
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000207 CanQual<FunctionProtoType> FTP = GetFormalType(MD);
John McCall5d865c322010-08-31 07:33:07 +0000208
209 // Add the formal parameters.
Benjamin Kramerf9890422015-02-17 16:48:30 +0000210 argTypes.append(FTP->param_type_begin(), FTP->param_type_end());
John McCall5d865c322010-08-31 07:33:07 +0000211
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000212 TheCXXABI.buildStructorSignature(MD, Type, argTypes);
Reid Kleckner89077a12013-12-17 19:46:40 +0000213
214 RequiredArgs required =
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000215 (MD->isVariadic() ? RequiredArgs(argTypes.size()) : RequiredArgs::All);
Reid Kleckner89077a12013-12-17 19:46:40 +0000216
John McCall8dda7b22012-07-07 06:41:13 +0000217 FunctionType::ExtInfo extInfo = FTP->getExtInfo();
David Majnemer0c0b6d92014-10-31 20:09:12 +0000218 CanQualType resultType = TheCXXABI.HasThisReturn(GD)
219 ? argTypes.front()
220 : TheCXXABI.hasMostDerivedReturn(GD)
221 ? CGM.getContext().VoidPtrTy
222 : Context.VoidTy;
Peter Collingbournef7706832014-12-12 23:41:25 +0000223 return arrangeLLVMFunctionInfo(resultType, /*instanceMethod=*/true,
224 /*chainCall=*/false, argTypes, extInfo,
225 required);
Anders Carlsson82ba57c2009-11-25 03:15:49 +0000226}
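// For illustration: for
//
//   struct S { S(int); };
//
// the complete constructor is arranged roughly as (S*, int) -> void, with the
// result type becoming S* instead on ABIs whose constructors return 'this'
// (e.g. the ARM and MSVC C++ ABIs, which is what TheCXXABI.HasThisReturn(GD)
// reports above); the ABI may also append extra implicit parameters via
// buildStructorSignature.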
227
Reid Kleckner314ef7b2014-02-01 00:04:45 +0000228/// Arrange a call to a C++ method, passing the given arguments.
229const CGFunctionInfo &
230CodeGenTypes::arrangeCXXConstructorCall(const CallArgList &args,
231 const CXXConstructorDecl *D,
232 CXXCtorType CtorKind,
233 unsigned ExtraArgs) {
234 // FIXME: Kill copy.
235 SmallVector<CanQualType, 16> ArgTypes;
Alexey Samsonov3551e312014-08-13 20:06:24 +0000236 for (const auto &Arg : args)
237 ArgTypes.push_back(Context.getCanonicalParamType(Arg.Ty));
Reid Kleckner314ef7b2014-02-01 00:04:45 +0000238
239 CanQual<FunctionProtoType> FPT = GetFormalType(D);
240 RequiredArgs Required = RequiredArgs::forPrototypePlus(FPT, 1 + ExtraArgs);
241 GlobalDecl GD(D, CtorKind);
David Majnemer0c0b6d92014-10-31 20:09:12 +0000242 CanQualType ResultType = TheCXXABI.HasThisReturn(GD)
243 ? ArgTypes.front()
244 : TheCXXABI.hasMostDerivedReturn(GD)
245 ? CGM.getContext().VoidPtrTy
246 : Context.VoidTy;
Reid Kleckner314ef7b2014-02-01 00:04:45 +0000247
248 FunctionType::ExtInfo Info = FPT->getExtInfo();
Peter Collingbournef7706832014-12-12 23:41:25 +0000249 return arrangeLLVMFunctionInfo(ResultType, /*instanceMethod=*/true,
250 /*chainCall=*/false, ArgTypes, Info,
251 Required);
Reid Kleckner314ef7b2014-02-01 00:04:45 +0000252}
253
John McCalla729c622012-02-17 03:33:10 +0000254/// Arrange the argument and result information for the declaration or
255/// definition of the given function.
256const CGFunctionInfo &
257CodeGenTypes::arrangeFunctionDeclaration(const FunctionDecl *FD) {
Chris Lattnerbea5b622009-05-12 20:27:19 +0000258 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
Anders Carlssonb15b55c2009-04-03 22:48:58 +0000259 if (MD->isInstance())
John McCalla729c622012-02-17 03:33:10 +0000260 return arrangeCXXMethodDeclaration(MD);
Mike Stump11289f42009-09-09 15:08:12 +0000261
John McCall2da83a32010-02-26 00:48:12 +0000262 CanQualType FTy = FD->getType()->getCanonicalTypeUnqualified();
John McCalla729c622012-02-17 03:33:10 +0000263
John McCall2da83a32010-02-26 00:48:12 +0000264 assert(isa<FunctionType>(FTy));
John McCalla729c622012-02-17 03:33:10 +0000265
266 // When declaring a function without a prototype, always use a
267 // non-variadic type.
268 if (isa<FunctionNoProtoType>(FTy)) {
269 CanQual<FunctionNoProtoType> noProto = FTy.getAs<FunctionNoProtoType>();
Peter Collingbournef7706832014-12-12 23:41:25 +0000270 return arrangeLLVMFunctionInfo(
271 noProto->getReturnType(), /*instanceMethod=*/false,
272 /*chainCall=*/false, None, noProto->getExtInfo(), RequiredArgs::All);
John McCalla729c622012-02-17 03:33:10 +0000273 }
274
John McCall2da83a32010-02-26 00:48:12 +0000275 assert(isa<FunctionProtoType>(FTy));
John McCall8dda7b22012-07-07 06:41:13 +0000276 return arrangeFreeFunctionType(FTy.getAs<FunctionProtoType>());
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +0000277}
278
John McCalla729c622012-02-17 03:33:10 +0000279/// Arrange the argument and result information for the declaration or
280/// definition of an Objective-C method.
281const CGFunctionInfo &
282CodeGenTypes::arrangeObjCMethodDeclaration(const ObjCMethodDecl *MD) {
283 // It happens that this is the same as a call with no optional
284 // arguments, except also using the formal 'self' type.
285 return arrangeObjCMessageSendSignature(MD, MD->getSelfDecl()->getType());
286}
287
288/// Arrange the argument and result information for the function type
289/// through which to perform a send to the given Objective-C method,
290/// using the given receiver type. The receiver type is not always
291/// the 'self' type of the method or even an Objective-C pointer type.
292/// This is *not* the right method for actually performing such a
293/// message send, due to the possibility of optional arguments.
294const CGFunctionInfo &
295CodeGenTypes::arrangeObjCMessageSendSignature(const ObjCMethodDecl *MD,
296 QualType receiverType) {
297 SmallVector<CanQualType, 16> argTys;
298 argTys.push_back(Context.getCanonicalParamType(receiverType));
299 argTys.push_back(Context.getCanonicalParamType(Context.getObjCSelType()));
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000300 // FIXME: Kill copy?
Aaron Ballman43b68be2014-03-07 17:50:17 +0000301 for (const auto *I : MD->params()) {
302 argTys.push_back(Context.getCanonicalParamType(I->getType()));
John McCall8ee376f2010-02-24 07:14:12 +0000303 }
John McCall31168b02011-06-15 23:02:42 +0000304
305 FunctionType::ExtInfo einfo;
Aaron Ballman0362a6d2013-12-18 16:23:37 +0000306 bool IsWindows = getContext().getTargetInfo().getTriple().isOSWindows();
307 einfo = einfo.withCallingConv(getCallingConventionForDecl(MD, IsWindows));
John McCall31168b02011-06-15 23:02:42 +0000308
David Blaikiebbafb8a2012-03-11 07:00:24 +0000309 if (getContext().getLangOpts().ObjCAutoRefCount &&
John McCall31168b02011-06-15 23:02:42 +0000310 MD->hasAttr<NSReturnsRetainedAttr>())
311 einfo = einfo.withProducesResult(true);
312
John McCalla729c622012-02-17 03:33:10 +0000313 RequiredArgs required =
314 (MD->isVariadic() ? RequiredArgs(argTys.size()) : RequiredArgs::All);
315
Peter Collingbournef7706832014-12-12 23:41:25 +0000316 return arrangeLLVMFunctionInfo(
317 GetReturnType(MD->getReturnType()), /*instanceMethod=*/false,
318 /*chainCall=*/false, argTys, einfo, required);
Daniel Dunbar3d7c90b2008-09-08 21:33:45 +0000319}
320
John McCalla729c622012-02-17 03:33:10 +0000321const CGFunctionInfo &
322CodeGenTypes::arrangeGlobalDeclaration(GlobalDecl GD) {
Anders Carlsson6710c532010-02-06 02:44:09 +0000323 // FIXME: Do we need to handle ObjCMethodDecl?
324 const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000325
Anders Carlsson6710c532010-02-06 02:44:09 +0000326 if (const CXXConstructorDecl *CD = dyn_cast<CXXConstructorDecl>(FD))
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000327 return arrangeCXXStructorDeclaration(CD, getFromCtorType(GD.getCtorType()));
Anders Carlsson6710c532010-02-06 02:44:09 +0000328
329 if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(FD))
Rafael Espindola8d2a19b2014-09-08 16:01:27 +0000330 return arrangeCXXStructorDeclaration(DD, getFromDtorType(GD.getDtorType()));
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000331
John McCalla729c622012-02-17 03:33:10 +0000332 return arrangeFunctionDeclaration(FD);
Anders Carlsson6710c532010-02-06 02:44:09 +0000333}
334
Reid Klecknerc3473512014-08-29 21:43:29 +0000335/// Arrange a thunk that takes 'this' as the first parameter followed by
336/// varargs. Return a void pointer, regardless of the actual return type.
337/// The body of the thunk will end in a musttail call to a function of the
338/// correct type, and the caller will bitcast the function to the correct
339/// prototype.
340const CGFunctionInfo &
341CodeGenTypes::arrangeMSMemberPointerThunk(const CXXMethodDecl *MD) {
342 assert(MD->isVirtual() && "only virtual memptrs have thunks");
343 CanQual<FunctionProtoType> FTP = GetFormalType(MD);
344 CanQualType ArgTys[] = { GetThisType(Context, MD->getParent()) };
Peter Collingbournef7706832014-12-12 23:41:25 +0000345 return arrangeLLVMFunctionInfo(Context.VoidTy, /*instanceMethod=*/false,
346 /*chainCall=*/false, ArgTys,
Reid Klecknerc3473512014-08-29 21:43:29 +0000347 FTP->getExtInfo(), RequiredArgs(1));
348}
349
David Majnemerdfa6d202015-03-11 18:36:39 +0000350const CGFunctionInfo &
David Majnemer37fd66e2015-03-13 22:36:55 +0000351CodeGenTypes::arrangeMSCtorClosure(const CXXConstructorDecl *CD,
352 CXXCtorType CT) {
353 assert(CT == Ctor_CopyingClosure || CT == Ctor_DefaultClosure);
354
David Majnemerdfa6d202015-03-11 18:36:39 +0000355 CanQual<FunctionProtoType> FTP = GetFormalType(CD);
356 SmallVector<CanQualType, 2> ArgTys;
357 const CXXRecordDecl *RD = CD->getParent();
358 ArgTys.push_back(GetThisType(Context, RD));
David Majnemer37fd66e2015-03-13 22:36:55 +0000359 if (CT == Ctor_CopyingClosure)
360 ArgTys.push_back(*FTP->param_type_begin());
David Majnemerdfa6d202015-03-11 18:36:39 +0000361 if (RD->getNumVBases() > 0)
362 ArgTys.push_back(Context.IntTy);
363 CallingConv CC = Context.getDefaultCallingConvention(
364 /*IsVariadic=*/false, /*IsCXXMethod=*/true);
365 return arrangeLLVMFunctionInfo(Context.VoidTy, /*instanceMethod=*/true,
366 /*chainCall=*/false, ArgTys,
367 FunctionType::ExtInfo(CC), RequiredArgs::All);
368}
369
John McCallc818bbb2012-12-07 07:03:17 +0000370/// Arrange a call as unto a free function, except possibly with an
371/// additional number of formal parameters considered required.
372static const CGFunctionInfo &
373arrangeFreeFunctionLikeCall(CodeGenTypes &CGT,
Mark Lacey23455752013-10-10 20:57:00 +0000374 CodeGenModule &CGM,
John McCallc818bbb2012-12-07 07:03:17 +0000375 const CallArgList &args,
376 const FunctionType *fnType,
Peter Collingbournef7706832014-12-12 23:41:25 +0000377 unsigned numExtraRequiredArgs,
378 bool chainCall) {
John McCallc818bbb2012-12-07 07:03:17 +0000379 assert(args.size() >= numExtraRequiredArgs);
380
381 // In most cases, there are no optional arguments.
382 RequiredArgs required = RequiredArgs::All;
383
384 // If we have a variadic prototype, the required arguments are the
385 // extra prefix plus the arguments in the prototype.
386 if (const FunctionProtoType *proto = dyn_cast<FunctionProtoType>(fnType)) {
387 if (proto->isVariadic())
Alp Toker9cacbab2014-01-20 20:26:09 +0000388 required = RequiredArgs(proto->getNumParams() + numExtraRequiredArgs);
John McCallc818bbb2012-12-07 07:03:17 +0000389
390 // If we don't have a prototype at all, but we're supposed to
391 // explicitly use the variadic convention for unprototyped calls,
392 // treat all of the arguments as required but preserve the nominal
393 // possibility of variadics.
Mark Lacey23455752013-10-10 20:57:00 +0000394 } else if (CGM.getTargetCodeGenInfo()
395 .isNoProtoCallVariadic(args,
396 cast<FunctionNoProtoType>(fnType))) {
John McCallc818bbb2012-12-07 07:03:17 +0000397 required = RequiredArgs(args.size());
398 }
399
Peter Collingbournef7706832014-12-12 23:41:25 +0000400 // FIXME: Kill copy.
401 SmallVector<CanQualType, 16> argTypes;
402 for (const auto &arg : args)
403 argTypes.push_back(CGT.getContext().getCanonicalParamType(arg.Ty));
404 return CGT.arrangeLLVMFunctionInfo(GetReturnType(fnType->getReturnType()),
405 /*instanceMethod=*/false, chainCall,
406 argTypes, fnType->getExtInfo(), required);
John McCallc818bbb2012-12-07 07:03:17 +0000407}
408
John McCalla729c622012-02-17 03:33:10 +0000409/// Figure out the rules for calling a function with the given formal
410/// type using the given arguments. The arguments are necessary
411/// because the function might be unprototyped, in which case it's
412/// target-dependent in crazy ways.
413const CGFunctionInfo &
John McCall8dda7b22012-07-07 06:41:13 +0000414CodeGenTypes::arrangeFreeFunctionCall(const CallArgList &args,
Peter Collingbournef7706832014-12-12 23:41:25 +0000415 const FunctionType *fnType,
416 bool chainCall) {
417 return arrangeFreeFunctionLikeCall(*this, CGM, args, fnType,
418 chainCall ? 1 : 0, chainCall);
John McCallc818bbb2012-12-07 07:03:17 +0000419}
John McCalla729c622012-02-17 03:33:10 +0000420
John McCallc818bbb2012-12-07 07:03:17 +0000421/// A block function call is essentially a free-function call with an
422/// extra implicit argument.
423const CGFunctionInfo &
424CodeGenTypes::arrangeBlockFunctionCall(const CallArgList &args,
425 const FunctionType *fnType) {
Peter Collingbournef7706832014-12-12 23:41:25 +0000426 return arrangeFreeFunctionLikeCall(*this, CGM, args, fnType, 1,
427 /*chainCall=*/false);
John McCalla729c622012-02-17 03:33:10 +0000428}
429
430const CGFunctionInfo &
John McCall8dda7b22012-07-07 06:41:13 +0000431CodeGenTypes::arrangeFreeFunctionCall(QualType resultType,
432 const CallArgList &args,
433 FunctionType::ExtInfo info,
434 RequiredArgs required) {
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000435 // FIXME: Kill copy.
John McCalla729c622012-02-17 03:33:10 +0000436 SmallVector<CanQualType, 16> argTypes;
Alexey Samsonov3551e312014-08-13 20:06:24 +0000437 for (const auto &Arg : args)
438 argTypes.push_back(Context.getCanonicalParamType(Arg.Ty));
Peter Collingbournef7706832014-12-12 23:41:25 +0000439 return arrangeLLVMFunctionInfo(
440 GetReturnType(resultType), /*instanceMethod=*/false,
441 /*chainCall=*/false, argTypes, info, required);
John McCall8dda7b22012-07-07 06:41:13 +0000442}
443
444/// Arrange a call to a C++ method, passing the given arguments.
445const CGFunctionInfo &
446CodeGenTypes::arrangeCXXMethodCall(const CallArgList &args,
447 const FunctionProtoType *FPT,
448 RequiredArgs required) {
449 // FIXME: Kill copy.
450 SmallVector<CanQualType, 16> argTypes;
Alexey Samsonov3551e312014-08-13 20:06:24 +0000451 for (const auto &Arg : args)
452 argTypes.push_back(Context.getCanonicalParamType(Arg.Ty));
John McCall8dda7b22012-07-07 06:41:13 +0000453
454 FunctionType::ExtInfo info = FPT->getExtInfo();
Peter Collingbournef7706832014-12-12 23:41:25 +0000455 return arrangeLLVMFunctionInfo(
456 GetReturnType(FPT->getReturnType()), /*instanceMethod=*/true,
457 /*chainCall=*/false, argTypes, info, required);
Daniel Dunbar3cd20632009-01-31 02:19:00 +0000458}
459
Reid Kleckner4982b822014-01-31 22:54:50 +0000460const CGFunctionInfo &CodeGenTypes::arrangeFreeFunctionDeclaration(
461 QualType resultType, const FunctionArgList &args,
462 const FunctionType::ExtInfo &info, bool isVariadic) {
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000463 // FIXME: Kill copy.
John McCalla729c622012-02-17 03:33:10 +0000464 SmallVector<CanQualType, 16> argTypes;
Alexey Samsonov3551e312014-08-13 20:06:24 +0000465 for (auto Arg : args)
466 argTypes.push_back(Context.getCanonicalParamType(Arg->getType()));
John McCalla729c622012-02-17 03:33:10 +0000467
468 RequiredArgs required =
469 (isVariadic ? RequiredArgs(args.size()) : RequiredArgs::All);
Peter Collingbournef7706832014-12-12 23:41:25 +0000470 return arrangeLLVMFunctionInfo(
471 GetReturnType(resultType), /*instanceMethod=*/false,
472 /*chainCall=*/false, argTypes, info, required);
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000473}
474
John McCalla729c622012-02-17 03:33:10 +0000475const CGFunctionInfo &CodeGenTypes::arrangeNullaryFunction() {
Peter Collingbournef7706832014-12-12 23:41:25 +0000476 return arrangeLLVMFunctionInfo(
477 getContext().VoidTy, /*instanceMethod=*/false, /*chainCall=*/false,
478 None, FunctionType::ExtInfo(), RequiredArgs::All);
John McCalla738c252011-03-09 04:27:21 +0000479}
480
John McCalla729c622012-02-17 03:33:10 +0000481/// Arrange the argument and result information for an abstract value
482/// of a given function type. This is the method which all of the
483/// above functions ultimately defer to.
484const CGFunctionInfo &
John McCall8dda7b22012-07-07 06:41:13 +0000485CodeGenTypes::arrangeLLVMFunctionInfo(CanQualType resultType,
Peter Collingbournef7706832014-12-12 23:41:25 +0000486 bool instanceMethod,
487 bool chainCall,
John McCall8dda7b22012-07-07 06:41:13 +0000488 ArrayRef<CanQualType> argTypes,
489 FunctionType::ExtInfo info,
490 RequiredArgs required) {
Saleem Abdulrasool32d1a962014-11-25 03:49:50 +0000491 assert(std::all_of(argTypes.begin(), argTypes.end(),
492 std::mem_fun_ref(&CanQualType::isCanonicalAsParam)));
John McCall2da83a32010-02-26 00:48:12 +0000493
John McCalla729c622012-02-17 03:33:10 +0000494 unsigned CC = ClangCallConvToLLVMCallConv(info.getCC());
John McCallab26cfa2010-02-05 21:31:56 +0000495
Daniel Dunbare0be8292009-02-03 00:07:12 +0000496 // Lookup or create unique function info.
497 llvm::FoldingSetNodeID ID;
Peter Collingbournef7706832014-12-12 23:41:25 +0000498 CGFunctionInfo::Profile(ID, instanceMethod, chainCall, info, required,
499 resultType, argTypes);
Daniel Dunbare0be8292009-02-03 00:07:12 +0000500
Craig Topper8a13c412014-05-21 05:09:00 +0000501 void *insertPos = nullptr;
John McCalla729c622012-02-17 03:33:10 +0000502 CGFunctionInfo *FI = FunctionInfos.FindNodeOrInsertPos(ID, insertPos);
Daniel Dunbare0be8292009-02-03 00:07:12 +0000503 if (FI)
504 return *FI;
505
John McCalla729c622012-02-17 03:33:10 +0000506 // Construct the function info. We co-allocate the ArgInfos.
Peter Collingbournef7706832014-12-12 23:41:25 +0000507 FI = CGFunctionInfo::create(CC, instanceMethod, chainCall, info,
508 resultType, argTypes, required);
John McCalla729c622012-02-17 03:33:10 +0000509 FunctionInfos.InsertNode(FI, insertPos);
Daniel Dunbar313321e2009-02-03 05:31:23 +0000510
David Blaikie82e95a32014-11-19 07:49:47 +0000511 bool inserted = FunctionsBeingProcessed.insert(FI).second;
512 (void)inserted;
John McCalla729c622012-02-17 03:33:10 +0000513 assert(inserted && "Recursively being processed?");
Chris Lattner6fb0ccf2011-07-15 05:16:14 +0000514
Daniel Dunbar313321e2009-02-03 05:31:23 +0000515 // Compute ABI information.
Chris Lattner22326a12010-07-29 02:31:05 +0000516 getABIInfo().computeInfo(*FI);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000517
Chris Lattnerfe34c1d2010-07-29 06:26:06 +0000518 // Loop over all of the computed argument and return value info. If any of
519 // them are direct or extend without a specified coerce type, specify the
520 // default now.
John McCalla729c622012-02-17 03:33:10 +0000521 ABIArgInfo &retInfo = FI->getReturnInfo();
Craig Topper8a13c412014-05-21 05:09:00 +0000522 if (retInfo.canHaveCoerceToType() && retInfo.getCoerceToType() == nullptr)
John McCalla729c622012-02-17 03:33:10 +0000523 retInfo.setCoerceToType(ConvertType(FI->getReturnType()));
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000524
Aaron Ballmanec47bc22014-03-17 18:10:01 +0000525 for (auto &I : FI->arguments())
Craig Topper8a13c412014-05-21 05:09:00 +0000526 if (I.info.canHaveCoerceToType() && I.info.getCoerceToType() == nullptr)
Aaron Ballmanec47bc22014-03-17 18:10:01 +0000527 I.info.setCoerceToType(ConvertType(I.type));
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000528
John McCalla729c622012-02-17 03:33:10 +0000529 bool erased = FunctionsBeingProcessed.erase(FI); (void)erased;
530 assert(erased && "Not in set?");
Chris Lattner1a651332011-07-15 06:41:05 +0000531
Daniel Dunbare0be8292009-02-03 00:07:12 +0000532 return *FI;
Daniel Dunbarbf8c24a2009-02-02 23:23:47 +0000533}
534
John McCalla729c622012-02-17 03:33:10 +0000535CGFunctionInfo *CGFunctionInfo::create(unsigned llvmCC,
Peter Collingbournef7706832014-12-12 23:41:25 +0000536 bool instanceMethod,
537 bool chainCall,
John McCalla729c622012-02-17 03:33:10 +0000538 const FunctionType::ExtInfo &info,
539 CanQualType resultType,
540 ArrayRef<CanQualType> argTypes,
541 RequiredArgs required) {
542 void *buffer = operator new(sizeof(CGFunctionInfo) +
543 sizeof(ArgInfo) * (argTypes.size() + 1));
544 CGFunctionInfo *FI = new(buffer) CGFunctionInfo();
545 FI->CallingConvention = llvmCC;
546 FI->EffectiveCallingConvention = llvmCC;
547 FI->ASTCallingConvention = info.getCC();
Peter Collingbournef7706832014-12-12 23:41:25 +0000548 FI->InstanceMethod = instanceMethod;
549 FI->ChainCall = chainCall;
John McCalla729c622012-02-17 03:33:10 +0000550 FI->NoReturn = info.getNoReturn();
551 FI->ReturnsRetained = info.getProducesResult();
552 FI->Required = required;
553 FI->HasRegParm = info.getHasRegParm();
554 FI->RegParm = info.getRegParm();
Craig Topper8a13c412014-05-21 05:09:00 +0000555 FI->ArgStruct = nullptr;
John McCalla729c622012-02-17 03:33:10 +0000556 FI->NumArgs = argTypes.size();
557 FI->getArgsBuffer()[0].type = resultType;
558 for (unsigned i = 0, e = argTypes.size(); i != e; ++i)
559 FI->getArgsBuffer()[i + 1].type = argTypes[i];
560 return FI;
Daniel Dunbar313321e2009-02-03 05:31:23 +0000561}
562
563/***/
564
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000565namespace {
566// ABIArgInfo::Expand implementation.
567
568// Specifies the way QualType passed as ABIArgInfo::Expand is expanded.
569struct TypeExpansion {
570 enum TypeExpansionKind {
571 // Elements of constant arrays are expanded recursively.
572 TEK_ConstantArray,
573 // Record fields are expanded recursively (but if record is a union, only
574 // the field with the largest size is expanded).
575 TEK_Record,
576 // For complex types, real and imaginary parts are expanded recursively.
577 TEK_Complex,
578 // All other types are not expandable.
579 TEK_None
580 };
581
582 const TypeExpansionKind Kind;
583
584 TypeExpansion(TypeExpansionKind K) : Kind(K) {}
585 virtual ~TypeExpansion() {}
586};
587
588struct ConstantArrayExpansion : TypeExpansion {
589 QualType EltTy;
590 uint64_t NumElts;
591
592 ConstantArrayExpansion(QualType EltTy, uint64_t NumElts)
593 : TypeExpansion(TEK_ConstantArray), EltTy(EltTy), NumElts(NumElts) {}
594 static bool classof(const TypeExpansion *TE) {
595 return TE->Kind == TEK_ConstantArray;
596 }
597};
598
599struct RecordExpansion : TypeExpansion {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000600 SmallVector<const CXXBaseSpecifier *, 1> Bases;
601
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000602 SmallVector<const FieldDecl *, 1> Fields;
603
Reid Klecknere9f6a712014-10-31 17:10:41 +0000604 RecordExpansion(SmallVector<const CXXBaseSpecifier *, 1> &&Bases,
605 SmallVector<const FieldDecl *, 1> &&Fields)
606 : TypeExpansion(TEK_Record), Bases(Bases), Fields(Fields) {}
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000607 static bool classof(const TypeExpansion *TE) {
608 return TE->Kind == TEK_Record;
609 }
610};
611
612struct ComplexExpansion : TypeExpansion {
613 QualType EltTy;
614
615 ComplexExpansion(QualType EltTy) : TypeExpansion(TEK_Complex), EltTy(EltTy) {}
616 static bool classof(const TypeExpansion *TE) {
617 return TE->Kind == TEK_Complex;
618 }
619};
620
621struct NoExpansion : TypeExpansion {
622 NoExpansion() : TypeExpansion(TEK_None) {}
623 static bool classof(const TypeExpansion *TE) {
624 return TE->Kind == TEK_None;
625 }
626};
627} // namespace
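// For illustration of the expansion scheme (a sketch, not exhaustive): a type
// such as
//
//   struct P { int xy[2]; _Complex float c; };
//
// is flattened recursively - the array into two ints, the complex value into
// its real and imaginary parts - so getExpansionSize() would report 4 and
// getExpandedTypes() would produce (i32, i32, float, float) on a typical
// 32-bit-int target.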
628
629static std::unique_ptr<TypeExpansion>
630getTypeExpansion(QualType Ty, const ASTContext &Context) {
631 if (const ConstantArrayType *AT = Context.getAsConstantArrayType(Ty)) {
632 return llvm::make_unique<ConstantArrayExpansion>(
633 AT->getElementType(), AT->getSize().getZExtValue());
634 }
635 if (const RecordType *RT = Ty->getAs<RecordType>()) {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000636 SmallVector<const CXXBaseSpecifier *, 1> Bases;
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000637 SmallVector<const FieldDecl *, 1> Fields;
Bob Wilsone826a2a2011-08-03 05:58:22 +0000638 const RecordDecl *RD = RT->getDecl();
639 assert(!RD->hasFlexibleArrayMember() &&
640 "Cannot expand structure with flexible array.");
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000641 if (RD->isUnion()) {
642 // Unions can be here only in degenerate cases - all the fields are the same
643 // after flattening. Thus we have to use the "largest" field.
Craig Topper8a13c412014-05-21 05:09:00 +0000644 const FieldDecl *LargestFD = nullptr;
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000645 CharUnits UnionSize = CharUnits::Zero();
646
Aaron Ballmane8a8bae2014-03-08 20:12:42 +0000647 for (const auto *FD : RD->fields()) {
Reid Kleckner80944df2014-10-31 22:00:51 +0000648 // Skip zero length bitfields.
649 if (FD->isBitField() && FD->getBitWidthValue(Context) == 0)
650 continue;
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000651 assert(!FD->isBitField() &&
652 "Cannot expand structure with bit-field members.");
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000653 CharUnits FieldSize = Context.getTypeSizeInChars(FD->getType());
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000654 if (UnionSize < FieldSize) {
655 UnionSize = FieldSize;
656 LargestFD = FD;
657 }
658 }
659 if (LargestFD)
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000660 Fields.push_back(LargestFD);
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000661 } else {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000662 if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
663 assert(!CXXRD->isDynamicClass() &&
664 "cannot expand vtable pointers in dynamic classes");
665 for (const CXXBaseSpecifier &BS : CXXRD->bases())
666 Bases.push_back(&BS);
667 }
668
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000669 for (const auto *FD : RD->fields()) {
Reid Kleckner80944df2014-10-31 22:00:51 +0000670 // Skip zero length bitfields.
671 if (FD->isBitField() && FD->getBitWidthValue(Context) == 0)
672 continue;
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000673 assert(!FD->isBitField() &&
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000674 "Cannot expand structure with bit-field members.");
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000675 Fields.push_back(FD);
Anton Korobeynikov4215ca72012-04-13 11:22:00 +0000676 }
Bob Wilsone826a2a2011-08-03 05:58:22 +0000677 }
Reid Klecknere9f6a712014-10-31 17:10:41 +0000678 return llvm::make_unique<RecordExpansion>(std::move(Bases),
679 std::move(Fields));
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000680 }
681 if (const ComplexType *CT = Ty->getAs<ComplexType>()) {
682 return llvm::make_unique<ComplexExpansion>(CT->getElementType());
683 }
684 return llvm::make_unique<NoExpansion>();
685}
686
Alexey Samsonov52c0f6a2014-09-29 20:30:22 +0000687static int getExpansionSize(QualType Ty, const ASTContext &Context) {
688 auto Exp = getTypeExpansion(Ty, Context);
689 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
690 return CAExp->NumElts * getExpansionSize(CAExp->EltTy, Context);
691 }
692 if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
693 int Res = 0;
Reid Klecknere9f6a712014-10-31 17:10:41 +0000694 for (auto BS : RExp->Bases)
695 Res += getExpansionSize(BS->getType(), Context);
Alexey Samsonov52c0f6a2014-09-29 20:30:22 +0000696 for (auto FD : RExp->Fields)
697 Res += getExpansionSize(FD->getType(), Context);
698 return Res;
699 }
700 if (isa<ComplexExpansion>(Exp.get()))
701 return 2;
702 assert(isa<NoExpansion>(Exp.get()));
703 return 1;
704}
705
Alexey Samsonov153004f2014-09-29 22:08:00 +0000706void
707CodeGenTypes::getExpandedTypes(QualType Ty,
708 SmallVectorImpl<llvm::Type *>::iterator &TI) {
709 auto Exp = getTypeExpansion(Ty, Context);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000710 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
711 for (int i = 0, n = CAExp->NumElts; i < n; i++) {
Alexey Samsonov153004f2014-09-29 22:08:00 +0000712 getExpandedTypes(CAExp->EltTy, TI);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000713 }
714 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000715 for (auto BS : RExp->Bases)
716 getExpandedTypes(BS->getType(), TI);
717 for (auto FD : RExp->Fields)
Alexey Samsonov153004f2014-09-29 22:08:00 +0000718 getExpandedTypes(FD->getType(), TI);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000719 } else if (auto CExp = dyn_cast<ComplexExpansion>(Exp.get())) {
720 llvm::Type *EltTy = ConvertType(CExp->EltTy);
Alexey Samsonov153004f2014-09-29 22:08:00 +0000721 *TI++ = EltTy;
722 *TI++ = EltTy;
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000723 } else {
724 assert(isa<NoExpansion>(Exp.get()));
Alexey Samsonov153004f2014-09-29 22:08:00 +0000725 *TI++ = ConvertType(Ty);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000726 }
Daniel Dunbar8fc81b02008-09-17 00:51:38 +0000727}
728
Alexey Samsonov91cf4552014-08-22 01:06:06 +0000729void CodeGenFunction::ExpandTypeFromArgs(
730 QualType Ty, LValue LV, SmallVectorImpl<llvm::Argument *>::iterator &AI) {
Mike Stump11289f42009-09-09 15:08:12 +0000731 assert(LV.isSimple() &&
732 "Unexpected non-simple lvalue during struct expansion.");
Daniel Dunbar8fc81b02008-09-17 00:51:38 +0000733
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000734 auto Exp = getTypeExpansion(Ty, getContext());
735 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
736 for (int i = 0, n = CAExp->NumElts; i < n; i++) {
David Blaikie17ea2662015-04-04 21:07:17 +0000737 llvm::Value *EltAddr =
738 Builder.CreateConstGEP2_32(nullptr, LV.getAddress(), 0, i);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000739 LValue LV = MakeAddrLValue(EltAddr, CAExp->EltTy);
740 ExpandTypeFromArgs(CAExp->EltTy, LV, AI);
Daniel Dunbar8fc81b02008-09-17 00:51:38 +0000741 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000742 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000743 llvm::Value *This = LV.getAddress();
744 for (const CXXBaseSpecifier *BS : RExp->Bases) {
745 // Perform a single step derived-to-base conversion.
746 llvm::Value *Base =
747 GetAddressOfBaseClass(This, Ty->getAsCXXRecordDecl(), &BS, &BS + 1,
748 /*NullCheckValue=*/false, SourceLocation());
749 LValue SubLV = MakeAddrLValue(Base, BS->getType());
750
751 // Recurse onto bases.
752 ExpandTypeFromArgs(BS->getType(), SubLV, AI);
753 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000754 for (auto FD : RExp->Fields) {
755 // FIXME: What are the right qualifiers here?
756 LValue SubLV = EmitLValueForField(LV, FD);
757 ExpandTypeFromArgs(FD->getType(), SubLV, AI);
Bob Wilsone826a2a2011-08-03 05:58:22 +0000758 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000759 } else if (auto CExp = dyn_cast<ComplexExpansion>(Exp.get())) {
David Blaikie2e804282015-04-05 22:47:07 +0000760 llvm::Value *RealAddr =
761 Builder.CreateStructGEP(nullptr, LV.getAddress(), 0, "real");
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000762 EmitStoreThroughLValue(RValue::get(*AI++),
763 MakeAddrLValue(RealAddr, CExp->EltTy));
David Blaikie2e804282015-04-05 22:47:07 +0000764 llvm::Value *ImagAddr =
765 Builder.CreateStructGEP(nullptr, LV.getAddress(), 1, "imag");
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000766 EmitStoreThroughLValue(RValue::get(*AI++),
767 MakeAddrLValue(ImagAddr, CExp->EltTy));
768 } else {
769 assert(isa<NoExpansion>(Exp.get()));
770 EmitStoreThroughLValue(RValue::get(*AI++), LV);
Daniel Dunbar8fc81b02008-09-17 00:51:38 +0000771 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000772}
773
774void CodeGenFunction::ExpandTypeToArgs(
775 QualType Ty, RValue RV, llvm::FunctionType *IRFuncTy,
776 SmallVectorImpl<llvm::Value *> &IRCallArgs, unsigned &IRCallArgPos) {
777 auto Exp = getTypeExpansion(Ty, getContext());
778 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
779 llvm::Value *Addr = RV.getAggregateAddr();
780 for (int i = 0, n = CAExp->NumElts; i < n; i++) {
David Blaikie17ea2662015-04-04 21:07:17 +0000781 llvm::Value *EltAddr = Builder.CreateConstGEP2_32(nullptr, Addr, 0, i);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000782 RValue EltRV =
783 convertTempToRValue(EltAddr, CAExp->EltTy, SourceLocation());
784 ExpandTypeToArgs(CAExp->EltTy, EltRV, IRFuncTy, IRCallArgs, IRCallArgPos);
785 }
786 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000787 llvm::Value *This = RV.getAggregateAddr();
788 for (const CXXBaseSpecifier *BS : RExp->Bases) {
789 // Perform a single step derived-to-base conversion.
790 llvm::Value *Base =
791 GetAddressOfBaseClass(This, Ty->getAsCXXRecordDecl(), &BS, &BS + 1,
792 /*NullCheckValue=*/false, SourceLocation());
793 RValue BaseRV = RValue::getAggregate(Base);
794
795 // Recurse onto bases.
796 ExpandTypeToArgs(BS->getType(), BaseRV, IRFuncTy, IRCallArgs,
797 IRCallArgPos);
798 }
799
800 LValue LV = MakeAddrLValue(This, Ty);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000801 for (auto FD : RExp->Fields) {
802 RValue FldRV = EmitRValueForField(LV, FD, SourceLocation());
803 ExpandTypeToArgs(FD->getType(), FldRV, IRFuncTy, IRCallArgs,
804 IRCallArgPos);
805 }
806 } else if (isa<ComplexExpansion>(Exp.get())) {
807 ComplexPairTy CV = RV.getComplexVal();
808 IRCallArgs[IRCallArgPos++] = CV.first;
809 IRCallArgs[IRCallArgPos++] = CV.second;
810 } else {
811 assert(isa<NoExpansion>(Exp.get()));
812 assert(RV.isScalar() &&
813 "Unexpected non-scalar rvalue during struct expansion.");
814
815 // Insert a bitcast as needed.
816 llvm::Value *V = RV.getScalarVal();
817 if (IRCallArgPos < IRFuncTy->getNumParams() &&
818 V->getType() != IRFuncTy->getParamType(IRCallArgPos))
819 V = Builder.CreateBitCast(V, IRFuncTy->getParamType(IRCallArgPos));
820
821 IRCallArgs[IRCallArgPos++] = V;
822 }
Daniel Dunbar8fc81b02008-09-17 00:51:38 +0000823}
824
Chris Lattner895c52b2010-06-27 06:04:18 +0000825/// EnterStructPointerForCoercedAccess - Given a struct pointer from which we
Chris Lattner1cd66982010-06-27 05:56:15 +0000826/// are accessing some number of bytes, try to gep into the struct to get
827/// at its inner goodness. Dive as deep as possible without entering an element
828/// with an in-memory size smaller than DstSize.
829static llvm::Value *
Chris Lattner895c52b2010-06-27 06:04:18 +0000830EnterStructPointerForCoercedAccess(llvm::Value *SrcPtr,
Chris Lattner2192fe52011-07-18 04:24:23 +0000831 llvm::StructType *SrcSTy,
Chris Lattner895c52b2010-06-27 06:04:18 +0000832 uint64_t DstSize, CodeGenFunction &CGF) {
Chris Lattner1cd66982010-06-27 05:56:15 +0000833 // We can't dive into a zero-element struct.
834 if (SrcSTy->getNumElements() == 0) return SrcPtr;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000835
Chris Lattner2192fe52011-07-18 04:24:23 +0000836 llvm::Type *FirstElt = SrcSTy->getElementType(0);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000837
Chris Lattner1cd66982010-06-27 05:56:15 +0000838 // If the first elt is at least as large as what we're looking for, or if the
James Molloy90d61012014-08-29 10:17:52 +0000839 // first element is the same size as the whole struct, we can enter it. The
840 // comparison must be made on the store size and not the alloca size. Using
841 // the alloca size may overstate the size of the load.
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000842 uint64_t FirstEltSize =
James Molloy90d61012014-08-29 10:17:52 +0000843 CGF.CGM.getDataLayout().getTypeStoreSize(FirstElt);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000844 if (FirstEltSize < DstSize &&
James Molloy90d61012014-08-29 10:17:52 +0000845 FirstEltSize < CGF.CGM.getDataLayout().getTypeStoreSize(SrcSTy))
Chris Lattner1cd66982010-06-27 05:56:15 +0000846 return SrcPtr;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000847
Chris Lattner1cd66982010-06-27 05:56:15 +0000848 // GEP into the first element.
David Blaikie17ea2662015-04-04 21:07:17 +0000849 SrcPtr = CGF.Builder.CreateConstGEP2_32(SrcSTy, SrcPtr, 0, 0, "coerce.dive");
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000850
Chris Lattner1cd66982010-06-27 05:56:15 +0000851 // If the first element is a struct, recurse.
Chris Lattner2192fe52011-07-18 04:24:23 +0000852 llvm::Type *SrcTy =
Chris Lattner1cd66982010-06-27 05:56:15 +0000853 cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
Chris Lattner2192fe52011-07-18 04:24:23 +0000854 if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy))
Chris Lattner895c52b2010-06-27 06:04:18 +0000855 return EnterStructPointerForCoercedAccess(SrcPtr, SrcSTy, DstSize, CGF);
Chris Lattner1cd66982010-06-27 05:56:15 +0000856
857 return SrcPtr;
858}
859
Chris Lattner055097f2010-06-27 06:26:04 +0000860/// CoerceIntOrPtrToIntOrPtr - Convert a value Val to the specific Ty where both
861/// are either integers or pointers. This does a truncation of the value if it
862/// is too large or a zero extension if it is too small.
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +0000863///
864/// This behaves as if the value were coerced through memory, so on big-endian
865/// targets the high bits are preserved in a truncation, while little-endian
866/// targets preserve the low bits.
Chris Lattner055097f2010-06-27 06:26:04 +0000867static llvm::Value *CoerceIntOrPtrToIntOrPtr(llvm::Value *Val,
Chris Lattner2192fe52011-07-18 04:24:23 +0000868 llvm::Type *Ty,
Chris Lattner055097f2010-06-27 06:26:04 +0000869 CodeGenFunction &CGF) {
870 if (Val->getType() == Ty)
871 return Val;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000872
Chris Lattner055097f2010-06-27 06:26:04 +0000873 if (isa<llvm::PointerType>(Val->getType())) {
874 // If this is Pointer->Pointer avoid conversion to and from int.
875 if (isa<llvm::PointerType>(Ty))
876 return CGF.Builder.CreateBitCast(Val, Ty, "coerce.val");
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000877
Chris Lattner055097f2010-06-27 06:26:04 +0000878 // Convert the pointer to an integer so we can play with its width.
Chris Lattner5e016ae2010-06-27 07:15:29 +0000879 Val = CGF.Builder.CreatePtrToInt(Val, CGF.IntPtrTy, "coerce.val.pi");
Chris Lattner055097f2010-06-27 06:26:04 +0000880 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000881
Chris Lattner2192fe52011-07-18 04:24:23 +0000882 llvm::Type *DestIntTy = Ty;
Chris Lattner055097f2010-06-27 06:26:04 +0000883 if (isa<llvm::PointerType>(DestIntTy))
Chris Lattner5e016ae2010-06-27 07:15:29 +0000884 DestIntTy = CGF.IntPtrTy;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000885
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +0000886 if (Val->getType() != DestIntTy) {
887 const llvm::DataLayout &DL = CGF.CGM.getDataLayout();
888 if (DL.isBigEndian()) {
889 // Preserve the high bits on big-endian targets.
890 // That is what memory coercion does.
James Molloy491cefb2014-05-07 17:41:15 +0000891 uint64_t SrcSize = DL.getTypeSizeInBits(Val->getType());
892 uint64_t DstSize = DL.getTypeSizeInBits(DestIntTy);
893
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +0000894 if (SrcSize > DstSize) {
895 Val = CGF.Builder.CreateLShr(Val, SrcSize - DstSize, "coerce.highbits");
896 Val = CGF.Builder.CreateTrunc(Val, DestIntTy, "coerce.val.ii");
897 } else {
898 Val = CGF.Builder.CreateZExt(Val, DestIntTy, "coerce.val.ii");
899 Val = CGF.Builder.CreateShl(Val, DstSize - SrcSize, "coerce.highbits");
900 }
901 } else {
902 // Little-endian targets preserve the low bits. No shifts required.
903 Val = CGF.Builder.CreateIntCast(Val, DestIntTy, false, "coerce.val.ii");
904 }
905 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000906
Chris Lattner055097f2010-06-27 06:26:04 +0000907 if (isa<llvm::PointerType>(Ty))
908 Val = CGF.Builder.CreateIntToPtr(Val, Ty, "coerce.val.ip");
909 return Val;
910}
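// For illustration: coercing an i64 value down to i32 with the helper above
// truncates differently depending on endianness, matching what a store/load
// round trip through memory would produce:
//
//   little-endian: 0x1122334455667788 -> 0x55667788   (plain trunc)
//   big-endian:    0x1122334455667788 -> 0x11223344   (lshr 32, then trunc)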
911
Chris Lattner1cd66982010-06-27 05:56:15 +0000912
913
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000914/// CreateCoercedLoad - Create a load from \arg SrcPtr interpreted as
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000915/// a pointer to an object of type \arg Ty, known to be aligned to
916/// \arg SrcAlign bytes.
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000917///
918/// This safely handles the case when the src type is smaller than the
919/// destination type; in this situation the values of bits which are not
920/// present in the src are undefined.
921static llvm::Value *CreateCoercedLoad(llvm::Value *SrcPtr,
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000922 llvm::Type *Ty, CharUnits SrcAlign,
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000923 CodeGenFunction &CGF) {
Chris Lattner2192fe52011-07-18 04:24:23 +0000924 llvm::Type *SrcTy =
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000925 cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000926
Chris Lattnerd200eda2010-06-28 22:51:39 +0000927 // If SrcTy and Ty are the same, just do a load.
928 if (SrcTy == Ty)
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000929 return CGF.Builder.CreateAlignedLoad(SrcPtr, SrcAlign.getQuantity());
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000930
Micah Villmowdd31ca12012-10-08 16:25:52 +0000931 uint64_t DstSize = CGF.CGM.getDataLayout().getTypeAllocSize(Ty);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000932
Chris Lattner2192fe52011-07-18 04:24:23 +0000933 if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy)) {
Chris Lattner895c52b2010-06-27 06:04:18 +0000934 SrcPtr = EnterStructPointerForCoercedAccess(SrcPtr, SrcSTy, DstSize, CGF);
Chris Lattner1cd66982010-06-27 05:56:15 +0000935 SrcTy = cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
936 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000937
Micah Villmowdd31ca12012-10-08 16:25:52 +0000938 uint64_t SrcSize = CGF.CGM.getDataLayout().getTypeAllocSize(SrcTy);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000939
Chris Lattner055097f2010-06-27 06:26:04 +0000940 // If the source and destination are integer or pointer types, just do an
941 // extension or truncation to the desired type.
942 if ((isa<llvm::IntegerType>(Ty) || isa<llvm::PointerType>(Ty)) &&
943 (isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy))) {
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000944 llvm::LoadInst *Load =
945 CGF.Builder.CreateAlignedLoad(SrcPtr, SrcAlign.getQuantity());
Chris Lattner055097f2010-06-27 06:26:04 +0000946 return CoerceIntOrPtrToIntOrPtr(Load, Ty, CGF);
947 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000948
Daniel Dunbarb52d0772009-02-03 05:59:18 +0000949 // If load is legal, just bitcast the src pointer.
Daniel Dunbarffdb8432009-05-13 18:54:26 +0000950 if (SrcSize >= DstSize) {
Mike Stump18bb9282009-05-16 07:57:57 +0000951 // Generally SrcSize is never greater than DstSize, since this means we are
952 // losing bits. However, this can happen in cases where the structure has
953 // additional padding, for example due to a user specified alignment.
Daniel Dunbarffdb8432009-05-13 18:54:26 +0000954 //
Mike Stump18bb9282009-05-16 07:57:57 +0000955 // FIXME: Assert that we aren't truncating non-padding bits when we have
956 // access to that information.
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000957 llvm::Value *Casted =
958 CGF.Builder.CreateBitCast(SrcPtr, llvm::PointerType::getUnqual(Ty));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000959 return CGF.Builder.CreateAlignedLoad(Casted, SrcAlign.getQuantity());
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000960 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +0000961
Chris Lattner3fcc7902010-06-27 01:06:27 +0000962 // Otherwise do coercion through memory. This is stupid, but
963 // simple.
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000964 llvm::AllocaInst *Tmp = CGF.CreateTempAlloca(Ty);
965 Tmp->setAlignment(SrcAlign.getQuantity());
Manman Ren84b921f2012-11-28 22:08:52 +0000966 llvm::Type *I8PtrTy = CGF.Builder.getInt8PtrTy();
967 llvm::Value *Casted = CGF.Builder.CreateBitCast(Tmp, I8PtrTy);
968 llvm::Value *SrcCasted = CGF.Builder.CreateBitCast(SrcPtr, I8PtrTy);
969 CGF.Builder.CreateMemCpy(Casted, SrcCasted,
970 llvm::ConstantInt::get(CGF.IntPtrTy, SrcSize),
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000971 SrcAlign.getQuantity(), false);
972 return CGF.Builder.CreateAlignedLoad(Tmp, SrcAlign.getQuantity());
Daniel Dunbarf5589ac2009-02-02 19:06:38 +0000973}
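// For illustration: loading a 12-byte { i32, i32, i32 } through a larger
// 16-byte coercion type such as [2 x i64] takes the memcpy path above: the 12
// source bytes are copied into a temporary alloca of the destination type and
// the remaining 4 bytes are left undefined, exactly as the function comment
// promises. When the source is at least as large as the destination, a simple
// bitcast of the pointer plus a load suffices.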
974
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000975// Function to store a first-class aggregate into memory. We prefer to
976// store the elements rather than the aggregate to be more friendly to
977// fast-isel.
978// FIXME: Do we need to recurse here?
979static void BuildAggStore(CodeGenFunction &CGF, llvm::Value *Val,
980 llvm::Value *DestPtr, bool DestIsVolatile,
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000981 CharUnits DestAlign) {
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000982 // Prefer scalar stores to first-class aggregate stores.
Chris Lattner2192fe52011-07-18 04:24:23 +0000983 if (llvm::StructType *STy =
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000984 dyn_cast<llvm::StructType>(Val->getType())) {
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000985 const llvm::StructLayout *Layout =
986 CGF.CGM.getDataLayout().getStructLayout(STy);
987
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000988 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
David Blaikie17ea2662015-04-04 21:07:17 +0000989 llvm::Value *EltPtr = CGF.Builder.CreateConstGEP2_32(STy, DestPtr, 0, i);
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000990 llvm::Value *Elt = CGF.Builder.CreateExtractValue(Val, i);
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000991 uint64_t EltOffset = Layout->getElementOffset(i);
992 CharUnits EltAlign =
993 DestAlign.alignmentAtOffset(CharUnits::fromQuantity(EltOffset));
994 CGF.Builder.CreateAlignedStore(Elt, EltPtr, EltAlign.getQuantity(),
995 DestIsVolatile);
Eli Friedmanaf9b3252011-05-17 21:08:01 +0000996 }
997 } else {
Ulrich Weigand6e2cea62015-07-10 11:31:43 +0000998 CGF.Builder.CreateAlignedStore(Val, DestPtr, DestAlign.getQuantity(),
999 DestIsVolatile);
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001000 }
1001}
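// Illustrative IR (schematic names, assuming a { i32, i32 } coerced value
// being stored to a matching destination): rather than one aggregate store
//   store { i32, i32 } %val, { i32, i32 }* %dst
// BuildAggStore emits per-element stores,
//   %elt0 = extractvalue { i32, i32 } %val, 0
//   store i32 %elt0, i32* %dst.elt0   ; GEP 0, 0 of %dst
//   %elt1 = extractvalue { i32, i32 } %val, 1
//   store i32 %elt1, i32* %dst.elt1   ; GEP 0, 1 of %dst
// each with the alignment implied by the element's offset in the layout.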
1002
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001003/// CreateCoercedStore - Create a store to \arg DstPtr from \arg Src,
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001004/// where the source and destination may have different types. The
1005/// destination is known to be aligned to \arg DstAlign bytes.
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001006///
1007/// This safely handles the case when the src type is larger than the
1008/// destination type; the upper bits of the src will be lost.
1009static void CreateCoercedStore(llvm::Value *Src,
1010 llvm::Value *DstPtr,
Anders Carlsson17490832009-12-24 20:40:36 +00001011 bool DstIsVolatile,
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001012 CharUnits DstAlign,
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001013 CodeGenFunction &CGF) {
Chris Lattner2192fe52011-07-18 04:24:23 +00001014 llvm::Type *SrcTy = Src->getType();
1015 llvm::Type *DstTy =
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001016 cast<llvm::PointerType>(DstPtr->getType())->getElementType();
Chris Lattnerd200eda2010-06-28 22:51:39 +00001017 if (SrcTy == DstTy) {
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001018 CGF.Builder.CreateAlignedStore(Src, DstPtr, DstAlign.getQuantity(),
1019 DstIsVolatile);
Chris Lattnerd200eda2010-06-28 22:51:39 +00001020 return;
1021 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001022
Micah Villmowdd31ca12012-10-08 16:25:52 +00001023 uint64_t SrcSize = CGF.CGM.getDataLayout().getTypeAllocSize(SrcTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001024
Chris Lattner2192fe52011-07-18 04:24:23 +00001025 if (llvm::StructType *DstSTy = dyn_cast<llvm::StructType>(DstTy)) {
Chris Lattner895c52b2010-06-27 06:04:18 +00001026 DstPtr = EnterStructPointerForCoercedAccess(DstPtr, DstSTy, SrcSize, CGF);
1027 DstTy = cast<llvm::PointerType>(DstPtr->getType())->getElementType();
1028 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001029
Chris Lattner055097f2010-06-27 06:26:04 +00001030 // If the source and destination are integer or pointer types, just do an
1031 // extension or truncation to the desired type.
1032 if ((isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy)) &&
1033 (isa<llvm::IntegerType>(DstTy) || isa<llvm::PointerType>(DstTy))) {
1034 Src = CoerceIntOrPtrToIntOrPtr(Src, DstTy, CGF);
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001035 CGF.Builder.CreateAlignedStore(Src, DstPtr, DstAlign.getQuantity(),
1036 DstIsVolatile);
Chris Lattner055097f2010-06-27 06:26:04 +00001037 return;
1038 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001039
Micah Villmowdd31ca12012-10-08 16:25:52 +00001040 uint64_t DstSize = CGF.CGM.getDataLayout().getTypeAllocSize(DstTy);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001041
Daniel Dunbar313321e2009-02-03 05:31:23 +00001042 // If store is legal, just bitcast the src pointer.
Daniel Dunbar4be99ff2009-06-05 07:58:54 +00001043 if (SrcSize <= DstSize) {
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001044 llvm::Value *Casted =
1045 CGF.Builder.CreateBitCast(DstPtr, llvm::PointerType::getUnqual(SrcTy));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001046 BuildAggStore(CGF, Src, Casted, DstIsVolatile, DstAlign);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001047 } else {
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001048 // Otherwise do coercion through memory. This is stupid, but
1049 // simple.
Daniel Dunbar4be99ff2009-06-05 07:58:54 +00001050
1051 // Generally SrcSize is never greater than DstSize, since this means we are
1052 // losing bits. However, this can happen in cases where the structure has
1053 // additional padding, for example due to a user specified alignment.
1054 //
1055 // FIXME: Assert that we aren't truncating non-padding bits when we have access
1056 // to that information.
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001057 llvm::AllocaInst *Tmp = CGF.CreateTempAlloca(SrcTy);
1058 Tmp->setAlignment(DstAlign.getQuantity());
1059 CGF.Builder.CreateAlignedStore(Src, Tmp, DstAlign.getQuantity());
Manman Ren84b921f2012-11-28 22:08:52 +00001060 llvm::Type *I8PtrTy = CGF.Builder.getInt8PtrTy();
1061 llvm::Value *Casted = CGF.Builder.CreateBitCast(Tmp, I8PtrTy);
1062 llvm::Value *DstCasted = CGF.Builder.CreateBitCast(DstPtr, I8PtrTy);
1063 CGF.Builder.CreateMemCpy(DstCasted, Casted,
1064 llvm::ConstantInt::get(CGF.IntPtrTy, DstSize),
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001065 DstAlign.getQuantity(), false);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001066 }
1067}
1068
Alexey Samsonov153004f2014-09-29 22:08:00 +00001069namespace {
1070
1071/// Encapsulates information about the way function arguments from
1072 /// CGFunctionInfo should be passed to the actual LLVM IR function.
1073class ClangToLLVMArgMapping {
1074 static const unsigned InvalidIndex = ~0U;
1075 unsigned InallocaArgNo;
1076 unsigned SRetArgNo;
1077 unsigned TotalIRArgs;
1078
1079 /// Arguments of the LLVM IR function corresponding to a single Clang argument.
1080 struct IRArgs {
1081 unsigned PaddingArgIndex;
1082 // Argument is expanded to IR arguments at positions
1083 // [FirstArgIndex, FirstArgIndex + NumberOfArgs).
1084 unsigned FirstArgIndex;
1085 unsigned NumberOfArgs;
1086
1087 IRArgs()
1088 : PaddingArgIndex(InvalidIndex), FirstArgIndex(InvalidIndex),
1089 NumberOfArgs(0) {}
1090 };
1091
1092 SmallVector<IRArgs, 8> ArgInfo;
1093
1094public:
1095 ClangToLLVMArgMapping(const ASTContext &Context, const CGFunctionInfo &FI,
1096 bool OnlyRequiredArgs = false)
1097 : InallocaArgNo(InvalidIndex), SRetArgNo(InvalidIndex), TotalIRArgs(0),
1098 ArgInfo(OnlyRequiredArgs ? FI.getNumRequiredArgs() : FI.arg_size()) {
1099 construct(Context, FI, OnlyRequiredArgs);
1100 }
1101
1102 bool hasInallocaArg() const { return InallocaArgNo != InvalidIndex; }
1103 unsigned getInallocaArgNo() const {
1104 assert(hasInallocaArg());
1105 return InallocaArgNo;
1106 }
1107
1108 bool hasSRetArg() const { return SRetArgNo != InvalidIndex; }
1109 unsigned getSRetArgNo() const {
1110 assert(hasSRetArg());
1111 return SRetArgNo;
1112 }
1113
1114 unsigned totalIRArgs() const { return TotalIRArgs; }
1115
1116 bool hasPaddingArg(unsigned ArgNo) const {
1117 assert(ArgNo < ArgInfo.size());
1118 return ArgInfo[ArgNo].PaddingArgIndex != InvalidIndex;
1119 }
1120 unsigned getPaddingArgNo(unsigned ArgNo) const {
1121 assert(hasPaddingArg(ArgNo));
1122 return ArgInfo[ArgNo].PaddingArgIndex;
1123 }
1124
1125 /// Returns the index of the first IR argument corresponding to ArgNo, and
1126 /// the number of IR arguments it occupies.
1127 std::pair<unsigned, unsigned> getIRArgs(unsigned ArgNo) const {
1128 assert(ArgNo < ArgInfo.size());
1129 return std::make_pair(ArgInfo[ArgNo].FirstArgIndex,
1130 ArgInfo[ArgNo].NumberOfArgs);
1131 }
1132
1133private:
1134 void construct(const ASTContext &Context, const CGFunctionInfo &FI,
1135 bool OnlyRequiredArgs);
1136};
1137
1138void ClangToLLVMArgMapping::construct(const ASTContext &Context,
1139 const CGFunctionInfo &FI,
1140 bool OnlyRequiredArgs) {
1141 unsigned IRArgNo = 0;
1142 bool SwapThisWithSRet = false;
1143 const ABIArgInfo &RetAI = FI.getReturnInfo();
1144
1145 if (RetAI.getKind() == ABIArgInfo::Indirect) {
1146 SwapThisWithSRet = RetAI.isSRetAfterThis();
1147 SRetArgNo = SwapThisWithSRet ? 1 : IRArgNo++;
1148 }
1149
1150 unsigned ArgNo = 0;
1151 unsigned NumArgs = OnlyRequiredArgs ? FI.getNumRequiredArgs() : FI.arg_size();
1152 for (CGFunctionInfo::const_arg_iterator I = FI.arg_begin(); ArgNo < NumArgs;
1153 ++I, ++ArgNo) {
1154 assert(I != FI.arg_end());
1155 QualType ArgType = I->type;
1156 const ABIArgInfo &AI = I->info;
1157 // Collect data about IR arguments corresponding to Clang argument ArgNo.
1158 auto &IRArgs = ArgInfo[ArgNo];
1159
1160 if (AI.getPaddingType())
1161 IRArgs.PaddingArgIndex = IRArgNo++;
1162
1163 switch (AI.getKind()) {
1164 case ABIArgInfo::Extend:
1165 case ABIArgInfo::Direct: {
1166 // FIXME: handle sseregparm someday...
1167 llvm::StructType *STy = dyn_cast<llvm::StructType>(AI.getCoerceToType());
1168 if (AI.isDirect() && AI.getCanBeFlattened() && STy) {
1169 IRArgs.NumberOfArgs = STy->getNumElements();
1170 } else {
1171 IRArgs.NumberOfArgs = 1;
1172 }
1173 break;
1174 }
1175 case ABIArgInfo::Indirect:
1176 IRArgs.NumberOfArgs = 1;
1177 break;
1178 case ABIArgInfo::Ignore:
1179 case ABIArgInfo::InAlloca:
1180 // ignore and inalloca don't have matching LLVM parameters.
1181 IRArgs.NumberOfArgs = 0;
1182 break;
1183 case ABIArgInfo::Expand: {
1184 IRArgs.NumberOfArgs = getExpansionSize(ArgType, Context);
1185 break;
1186 }
1187 }
1188
1189 if (IRArgs.NumberOfArgs > 0) {
1190 IRArgs.FirstArgIndex = IRArgNo;
1191 IRArgNo += IRArgs.NumberOfArgs;
1192 }
1193
1194 // Skip over the sret parameter when it comes second. We already handled it
1195 // above.
1196 if (IRArgNo == 1 && SwapThisWithSRet)
1197 IRArgNo++;
1198 }
1199 assert(ArgNo == ArgInfo.size());
1200
1201 if (FI.usesInAlloca())
1202 InallocaArgNo = IRArgNo++;
1203
1204 TotalIRArgs = IRArgNo;
1205}
1206} // namespace
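// Worked example (hypothetical ABI choices, for illustration only): suppose
//   struct Big f(int x, _Complex double c);
// is lowered with an indirect (sret) return and with 'c' expanded into two
// doubles. The IR signature would be roughly
//   void @f(%struct.Big* sret, i32 %x, double %c.re, double %c.im)
// and the mapping records SRetArgNo = 0, 'x' at FirstArgIndex = 1 with
// NumberOfArgs = 1, 'c' at FirstArgIndex = 2 with NumberOfArgs = 2, and
// TotalIRArgs = 4.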
1207
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001208/***/
1209
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001210bool CodeGenModule::ReturnTypeUsesSRet(const CGFunctionInfo &FI) {
Daniel Dunbarb8b1c672009-02-05 08:00:50 +00001211 return FI.getReturnInfo().isIndirect();
Daniel Dunbar7633cbf2009-02-02 21:43:58 +00001212}
1213
Tim Northovere77cc392014-03-29 13:28:05 +00001214bool CodeGenModule::ReturnSlotInterferesWithArgs(const CGFunctionInfo &FI) {
1215 return ReturnTypeUsesSRet(FI) &&
1216 getTargetCodeGenInfo().doesReturnSlotInterfereWithArgs();
1217}
1218
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001219bool CodeGenModule::ReturnTypeUsesFPRet(QualType ResultType) {
1220 if (const BuiltinType *BT = ResultType->getAs<BuiltinType>()) {
1221 switch (BT->getKind()) {
1222 default:
1223 return false;
1224 case BuiltinType::Float:
John McCallc8e01702013-04-16 22:48:15 +00001225 return getTarget().useObjCFPRetForRealType(TargetInfo::Float);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001226 case BuiltinType::Double:
John McCallc8e01702013-04-16 22:48:15 +00001227 return getTarget().useObjCFPRetForRealType(TargetInfo::Double);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001228 case BuiltinType::LongDouble:
John McCallc8e01702013-04-16 22:48:15 +00001229 return getTarget().useObjCFPRetForRealType(TargetInfo::LongDouble);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001230 }
1231 }
1232
1233 return false;
1234}
1235
Anders Carlsson2f1a6c32011-10-31 16:27:11 +00001236bool CodeGenModule::ReturnTypeUsesFP2Ret(QualType ResultType) {
1237 if (const ComplexType *CT = ResultType->getAs<ComplexType>()) {
1238 if (const BuiltinType *BT = CT->getElementType()->getAs<BuiltinType>()) {
1239 if (BT->getKind() == BuiltinType::LongDouble)
John McCallc8e01702013-04-16 22:48:15 +00001240 return getTarget().useObjCFP2RetForComplexLongDouble();
Anders Carlsson2f1a6c32011-10-31 16:27:11 +00001241 }
1242 }
1243
1244 return false;
1245}
1246
Chris Lattnera5f58b02011-07-09 17:41:47 +00001247llvm::FunctionType *CodeGenTypes::GetFunctionType(GlobalDecl GD) {
John McCalla729c622012-02-17 03:33:10 +00001248 const CGFunctionInfo &FI = arrangeGlobalDeclaration(GD);
1249 return GetFunctionType(FI);
John McCallf8ff7b92010-02-23 00:48:20 +00001250}
1251
Chris Lattnera5f58b02011-07-09 17:41:47 +00001252llvm::FunctionType *
John McCalla729c622012-02-17 03:33:10 +00001253CodeGenTypes::GetFunctionType(const CGFunctionInfo &FI) {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001254
David Blaikie82e95a32014-11-19 07:49:47 +00001255 bool Inserted = FunctionsBeingProcessed.insert(&FI).second;
1256 (void)Inserted;
Chris Lattner6fb0ccf2011-07-15 05:16:14 +00001257 assert(Inserted && "Recursively being processed?");
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001258
Alexey Samsonov153004f2014-09-29 22:08:00 +00001259 llvm::Type *resultType = nullptr;
John McCall85dd2c52011-05-15 02:19:42 +00001260 const ABIArgInfo &retAI = FI.getReturnInfo();
1261 switch (retAI.getKind()) {
Daniel Dunbard3674e62008-09-11 01:48:57 +00001262 case ABIArgInfo::Expand:
John McCall85dd2c52011-05-15 02:19:42 +00001263 llvm_unreachable("Invalid ABI kind for return argument");
Daniel Dunbard3674e62008-09-11 01:48:57 +00001264
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00001265 case ABIArgInfo::Extend:
Daniel Dunbar67dace892009-02-03 06:17:37 +00001266 case ABIArgInfo::Direct:
John McCall85dd2c52011-05-15 02:19:42 +00001267 resultType = retAI.getCoerceToType();
Daniel Dunbar67dace892009-02-03 06:17:37 +00001268 break;
1269
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001270 case ABIArgInfo::InAlloca:
Reid Klecknerfab1e892014-02-25 00:59:14 +00001271 if (retAI.getInAllocaSRet()) {
1272 // sret things on win32 aren't void, they return the sret pointer.
1273 QualType ret = FI.getReturnType();
1274 llvm::Type *ty = ConvertType(ret);
1275 unsigned addressSpace = Context.getTargetAddressSpace(ret);
1276 resultType = llvm::PointerType::get(ty, addressSpace);
1277 } else {
1278 resultType = llvm::Type::getVoidTy(getLLVMContext());
1279 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001280 break;
1281
Daniel Dunbarb8b1c672009-02-05 08:00:50 +00001282 case ABIArgInfo::Indirect: {
John McCall85dd2c52011-05-15 02:19:42 +00001283 assert(!retAI.getIndirectAlign() && "Align unused on indirect return.");
1284 resultType = llvm::Type::getVoidTy(getLLVMContext());
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001285 break;
1286 }
1287
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001288 case ABIArgInfo::Ignore:
John McCall85dd2c52011-05-15 02:19:42 +00001289 resultType = llvm::Type::getVoidTy(getLLVMContext());
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001290 break;
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001291 }
Mike Stump11289f42009-09-09 15:08:12 +00001292
Alexey Samsonov153004f2014-09-29 22:08:00 +00001293 ClangToLLVMArgMapping IRFunctionArgs(getContext(), FI, true);
1294 SmallVector<llvm::Type*, 8> ArgTypes(IRFunctionArgs.totalIRArgs());
1295
1296 // Add type for sret argument.
1297 if (IRFunctionArgs.hasSRetArg()) {
1298 QualType Ret = FI.getReturnType();
1299 llvm::Type *Ty = ConvertType(Ret);
1300 unsigned AddressSpace = Context.getTargetAddressSpace(Ret);
1301 ArgTypes[IRFunctionArgs.getSRetArgNo()] =
1302 llvm::PointerType::get(Ty, AddressSpace);
1303 }
1304
1305 // Add type for inalloca argument.
1306 if (IRFunctionArgs.hasInallocaArg()) {
1307 auto ArgStruct = FI.getArgStruct();
1308 assert(ArgStruct);
1309 ArgTypes[IRFunctionArgs.getInallocaArgNo()] = ArgStruct->getPointerTo();
1310 }
1311
John McCallc818bbb2012-12-07 07:03:17 +00001312 // Add in all of the required arguments.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001313 unsigned ArgNo = 0;
Alexey Samsonov34625dd2014-09-29 21:21:48 +00001314 CGFunctionInfo::const_arg_iterator it = FI.arg_begin(),
1315 ie = it + FI.getNumRequiredArgs();
Alexey Samsonov153004f2014-09-29 22:08:00 +00001316 for (; it != ie; ++it, ++ArgNo) {
1317 const ABIArgInfo &ArgInfo = it->info;
Mike Stump11289f42009-09-09 15:08:12 +00001318
Rafael Espindolafad28de2012-10-24 01:59:00 +00001319 // Insert a padding type to ensure proper alignment.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001320 if (IRFunctionArgs.hasPaddingArg(ArgNo))
1321 ArgTypes[IRFunctionArgs.getPaddingArgNo(ArgNo)] =
1322 ArgInfo.getPaddingType();
Rafael Espindolafad28de2012-10-24 01:59:00 +00001323
Alexey Samsonov153004f2014-09-29 22:08:00 +00001324 unsigned FirstIRArg, NumIRArgs;
1325 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
1326
1327 switch (ArgInfo.getKind()) {
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001328 case ABIArgInfo::Ignore:
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001329 case ABIArgInfo::InAlloca:
Alexey Samsonov153004f2014-09-29 22:08:00 +00001330 assert(NumIRArgs == 0);
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001331 break;
1332
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001333 case ABIArgInfo::Indirect: {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001334 assert(NumIRArgs == 1);
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001335 // indirect arguments are always on the stack, which is addr space #0.
Chris Lattner2192fe52011-07-18 04:24:23 +00001336 llvm::Type *LTy = ConvertTypeForMem(it->type);
Alexey Samsonov153004f2014-09-29 22:08:00 +00001337 ArgTypes[FirstIRArg] = LTy->getPointerTo();
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001338 break;
1339 }
1340
1341 case ABIArgInfo::Extend:
Chris Lattner2cdfda42010-07-29 06:44:09 +00001342 case ABIArgInfo::Direct: {
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00001343 // Fast-isel and the optimizer generally like scalar values better than
1344 // FCAs, so we flatten them if this is safe to do for this argument.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001345 llvm::Type *argType = ArgInfo.getCoerceToType();
James Molloy6f244b62014-05-09 16:21:39 +00001346 llvm::StructType *st = dyn_cast<llvm::StructType>(argType);
Alexey Samsonov153004f2014-09-29 22:08:00 +00001347 if (st && ArgInfo.isDirect() && ArgInfo.getCanBeFlattened()) {
1348 assert(NumIRArgs == st->getNumElements());
John McCall85dd2c52011-05-15 02:19:42 +00001349 for (unsigned i = 0, e = st->getNumElements(); i != e; ++i)
Alexey Samsonov153004f2014-09-29 22:08:00 +00001350 ArgTypes[FirstIRArg + i] = st->getElementType(i);
Chris Lattner3dd716c2010-06-28 23:44:11 +00001351 } else {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001352 assert(NumIRArgs == 1);
1353 ArgTypes[FirstIRArg] = argType;
Chris Lattner3dd716c2010-06-28 23:44:11 +00001354 }
Daniel Dunbar2f219b02009-02-03 19:12:28 +00001355 break;
Chris Lattner2cdfda42010-07-29 06:44:09 +00001356 }
Mike Stump11289f42009-09-09 15:08:12 +00001357
Daniel Dunbard3674e62008-09-11 01:48:57 +00001358 case ABIArgInfo::Expand:
Alexey Samsonov153004f2014-09-29 22:08:00 +00001359 auto ArgTypesIter = ArgTypes.begin() + FirstIRArg;
1360 getExpandedTypes(it->type, ArgTypesIter);
1361 assert(ArgTypesIter == ArgTypes.begin() + FirstIRArg + NumIRArgs);
Daniel Dunbard3674e62008-09-11 01:48:57 +00001362 break;
1363 }
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001364 }
1365
Chris Lattner6fb0ccf2011-07-15 05:16:14 +00001366 bool Erased = FunctionsBeingProcessed.erase(&FI); (void)Erased;
1367 assert(Erased && "Not in set?");
Alexey Samsonov153004f2014-09-29 22:08:00 +00001368
1369 return llvm::FunctionType::get(resultType, ArgTypes, FI.isVariadic());
Daniel Dunbar81cf67f2008-09-09 23:48:28 +00001370}
1371
Chris Lattner2192fe52011-07-18 04:24:23 +00001372llvm::Type *CodeGenTypes::GetFunctionTypeForVTable(GlobalDecl GD) {
John McCall5d865c322010-08-31 07:33:07 +00001373 const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
Anders Carlsson64457732009-11-24 05:08:52 +00001374 const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001375
Chris Lattner8806e322011-07-10 00:18:59 +00001376 if (!isFuncTypeConvertible(FPT))
1377 return llvm::StructType::get(getLLVMContext());
1378
1379 const CGFunctionInfo *Info;
1380 if (isa<CXXDestructorDecl>(MD))
Rafael Espindola8d2a19b2014-09-08 16:01:27 +00001381 Info =
1382 &arrangeCXXStructorDeclaration(MD, getFromDtorType(GD.getDtorType()));
Chris Lattner8806e322011-07-10 00:18:59 +00001383 else
John McCalla729c622012-02-17 03:33:10 +00001384 Info = &arrangeCXXMethodDeclaration(MD);
1385 return GetFunctionType(*Info);
Anders Carlsson64457732009-11-24 05:08:52 +00001386}
1387
Daniel Dunbar3668cb22009-02-02 23:43:58 +00001388void CodeGenModule::ConstructAttributeList(const CGFunctionInfo &FI,
Daniel Dunbard931a872009-02-02 22:03:45 +00001389 const Decl *TargetDecl,
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001390 AttributeListType &PAL,
Bill Wendlingf4d64cb2013-02-22 00:13:35 +00001391 unsigned &CallingConv,
1392 bool AttrOnCallSite) {
Bill Wendlinga514ebc2012-10-15 20:36:26 +00001393 llvm::AttrBuilder FuncAttrs;
1394 llvm::AttrBuilder RetAttrs;
Paul Robinson08556952014-12-11 20:14:04 +00001395 bool HasOptnone = false;
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001396
Daniel Dunbar0ef34792009-09-12 00:59:20 +00001397 CallingConv = FI.getEffectiveCallingConvention();
1398
John McCallab26cfa2010-02-05 21:31:56 +00001399 if (FI.isNoReturn())
Bill Wendling207f0532012-12-20 19:27:06 +00001400 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
John McCallab26cfa2010-02-05 21:31:56 +00001401
Anton Korobeynikovc8478242009-04-04 00:49:24 +00001402 // FIXME: handle sseregparm someday...
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001403 if (TargetDecl) {
Rafael Espindola2d21ab02011-10-12 19:51:18 +00001404 if (TargetDecl->hasAttr<ReturnsTwiceAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001405 FuncAttrs.addAttribute(llvm::Attribute::ReturnsTwice);
Argyrios Kyrtzidisb4b64ca2009-06-30 02:34:44 +00001406 if (TargetDecl->hasAttr<NoThrowAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001407 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Richard Smithdebc59d2013-01-30 05:45:05 +00001408 if (TargetDecl->hasAttr<NoReturnAttr>())
1409 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
Aaron Ballman7c19ab12014-02-22 16:59:24 +00001410 if (TargetDecl->hasAttr<NoDuplicateAttr>())
1411 FuncAttrs.addAttribute(llvm::Attribute::NoDuplicate);
Richard Smithdebc59d2013-01-30 05:45:05 +00001412
1413 if (const FunctionDecl *Fn = dyn_cast<FunctionDecl>(TargetDecl)) {
John McCallbe349de2010-07-08 06:48:12 +00001414 const FunctionProtoType *FPT = Fn->getType()->getAs<FunctionProtoType>();
Steven Wu5528da72015-08-28 07:14:10 +00001415 if (FPT && FPT->isNothrow(getContext()))
Bill Wendling207f0532012-12-20 19:27:06 +00001416 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Richard Smith49af6292013-03-05 08:30:04 +00001417 // Don't use [[noreturn]] or _Noreturn for a call to a virtual function.
1418 // These attributes are not inherited by overloads.
1419 const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Fn);
1420 if (Fn->isNoReturn() && !(AttrOnCallSite && MD && MD->isVirtual()))
Richard Smithdebc59d2013-01-30 05:45:05 +00001421 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
John McCallbe349de2010-07-08 06:48:12 +00001422 }
1423
David Majnemer1bf0f8e2015-07-20 22:51:52 +00001424 // 'const', 'pure' and 'noalias' attributed functions are also nounwind.
Eric Christopherbf005ec2011-08-15 22:38:22 +00001425 if (TargetDecl->hasAttr<ConstAttr>()) {
Bill Wendling207f0532012-12-20 19:27:06 +00001426 FuncAttrs.addAttribute(llvm::Attribute::ReadNone);
1427 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Eric Christopherbf005ec2011-08-15 22:38:22 +00001428 } else if (TargetDecl->hasAttr<PureAttr>()) {
Bill Wendling207f0532012-12-20 19:27:06 +00001429 FuncAttrs.addAttribute(llvm::Attribute::ReadOnly);
1430 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
David Majnemer1bf0f8e2015-07-20 22:51:52 +00001431 } else if (TargetDecl->hasAttr<NoAliasAttr>()) {
1432 FuncAttrs.addAttribute(llvm::Attribute::ArgMemOnly);
1433 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Eric Christopherbf005ec2011-08-15 22:38:22 +00001434 }
David Majnemer631a90b2015-02-04 07:23:21 +00001435 if (TargetDecl->hasAttr<RestrictAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001436 RetAttrs.addAttribute(llvm::Attribute::NoAlias);
Hal Finkeld8442b12014-07-12 04:51:04 +00001437 if (TargetDecl->hasAttr<ReturnsNonNullAttr>())
1438 RetAttrs.addAttribute(llvm::Attribute::NonNull);
Paul Robinson08556952014-12-11 20:14:04 +00001439
1440 HasOptnone = TargetDecl->hasAttr<OptimizeNoneAttr>();
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001441 }
1442
Paul Robinson08556952014-12-11 20:14:04 +00001443 // OptimizeNoneAttr takes precedence over -Os or -Oz. No warning needed.
1444 if (!HasOptnone) {
1445 if (CodeGenOpts.OptimizeSize)
1446 FuncAttrs.addAttribute(llvm::Attribute::OptimizeForSize);
1447 if (CodeGenOpts.OptimizeSize == 2)
1448 FuncAttrs.addAttribute(llvm::Attribute::MinSize);
1449 }
1450
Chandler Carruthbc55fe22009-11-12 17:24:48 +00001451 if (CodeGenOpts.DisableRedZone)
Bill Wendling207f0532012-12-20 19:27:06 +00001452 FuncAttrs.addAttribute(llvm::Attribute::NoRedZone);
Chandler Carruthbc55fe22009-11-12 17:24:48 +00001453 if (CodeGenOpts.NoImplicitFloat)
Bill Wendling207f0532012-12-20 19:27:06 +00001454 FuncAttrs.addAttribute(llvm::Attribute::NoImplicitFloat);
Peter Collingbourneb4728c12014-05-19 22:14:34 +00001455 if (CodeGenOpts.EnableSegmentedStacks &&
1456 !(TargetDecl && TargetDecl->hasAttr<NoSplitStackAttr>()))
Reid Klecknerfb873af2014-04-10 22:59:13 +00001457 FuncAttrs.addAttribute("split-stack");
Devang Patel6e467b12009-06-04 23:32:02 +00001458
Bill Wendling2f81db62013-02-22 20:53:29 +00001459 if (AttrOnCallSite) {
1460 // Attributes that should go on the call site only.
1461 if (!CodeGenOpts.SimplifyLibCalls)
1462 FuncAttrs.addAttribute(llvm::Attribute::NoBuiltin);
Akira Hatanaka85365cd2015-07-02 22:15:41 +00001463 if (!CodeGenOpts.TrapFuncName.empty())
1464 FuncAttrs.addAttribute("trap-func-name", CodeGenOpts.TrapFuncName);
Bill Wendling706469b2013-02-28 22:49:57 +00001465 } else {
1466 // Attributes that should go on the function, but not the call site.
Bill Wendling706469b2013-02-28 22:49:57 +00001467 if (!CodeGenOpts.DisableFPElim) {
Bill Wendlingdabafea2013-03-13 22:24:33 +00001468 FuncAttrs.addAttribute("no-frame-pointer-elim", "false");
Bill Wendling706469b2013-02-28 22:49:57 +00001469 } else if (CodeGenOpts.OmitLeafFramePointer) {
Bill Wendlingdabafea2013-03-13 22:24:33 +00001470 FuncAttrs.addAttribute("no-frame-pointer-elim", "false");
Bill Wendling17d1b6142013-08-22 21:16:51 +00001471 FuncAttrs.addAttribute("no-frame-pointer-elim-non-leaf");
Bill Wendling706469b2013-02-28 22:49:57 +00001472 } else {
Bill Wendlingdabafea2013-03-13 22:24:33 +00001473 FuncAttrs.addAttribute("no-frame-pointer-elim", "true");
Bill Wendling17d1b6142013-08-22 21:16:51 +00001474 FuncAttrs.addAttribute("no-frame-pointer-elim-non-leaf");
Bill Wendling706469b2013-02-28 22:49:57 +00001475 }
1476
Akira Hatanaka262a4c42015-06-09 19:04:36 +00001477 FuncAttrs.addAttribute("disable-tail-calls",
1478 llvm::toStringRef(CodeGenOpts.DisableTailCalls));
Bill Wendlingdabafea2013-03-13 22:24:33 +00001479 FuncAttrs.addAttribute("less-precise-fpmad",
Bill Wendlingf69f5942013-07-26 21:51:11 +00001480 llvm::toStringRef(CodeGenOpts.LessPreciseFPMAD));
Bill Wendlingdabafea2013-03-13 22:24:33 +00001481 FuncAttrs.addAttribute("no-infs-fp-math",
Bill Wendlingf69f5942013-07-26 21:51:11 +00001482 llvm::toStringRef(CodeGenOpts.NoInfsFPMath));
Bill Wendlingdabafea2013-03-13 22:24:33 +00001483 FuncAttrs.addAttribute("no-nans-fp-math",
Bill Wendlingf69f5942013-07-26 21:51:11 +00001484 llvm::toStringRef(CodeGenOpts.NoNaNsFPMath));
Bill Wendlingdabafea2013-03-13 22:24:33 +00001485 FuncAttrs.addAttribute("unsafe-fp-math",
Bill Wendlingf69f5942013-07-26 21:51:11 +00001486 llvm::toStringRef(CodeGenOpts.UnsafeFPMath));
Bill Wendlingdabafea2013-03-13 22:24:33 +00001487 FuncAttrs.addAttribute("use-soft-float",
Bill Wendlingf69f5942013-07-26 21:51:11 +00001488 llvm::toStringRef(CodeGenOpts.SoftFloat));
Bill Wendlingb3219722013-07-22 20:15:41 +00001489 FuncAttrs.addAttribute("stack-protector-buffer-size",
Bill Wendling021c8de2013-07-12 22:26:07 +00001490 llvm::utostr(CodeGenOpts.SSPBufferSize));
Bill Wendlinga9cc8c02013-07-25 00:32:41 +00001491
Bill Wendlingd8f49502013-08-01 21:41:02 +00001492 if (!CodeGenOpts.StackRealignment)
1493 FuncAttrs.addAttribute("no-realign-stack");
Eric Christopher70c16652015-03-25 23:14:47 +00001494
Eric Christopher11acf732015-06-12 01:35:52 +00001495 // Add target-cpu and target-features attributes to functions. If
1496 // we have a decl for the function and it has a target attribute then
1497 // parse that and add it to the feature set.
1498 StringRef TargetCPU = getTarget().getTargetOpts().CPU;
Eric Christopher11acf732015-06-12 01:35:52 +00001499 const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(TargetDecl);
Eric Christopher3a98b3c2015-08-27 19:59:34 +00001500 if (FD && FD->getAttr<TargetAttr>()) {
1501 llvm::StringMap<bool> FeatureMap;
Eric Christopherdec31be2015-08-31 23:19:55 +00001502 const auto *TD = FD->getAttr<TargetAttr>();
Eric Christopher11acf732015-06-12 01:35:52 +00001503
Eric Christopherdec31be2015-08-31 23:19:55 +00001504 // Make a copy of the features as passed on the command line.
1505 std::vector<std::string> FnFeatures =
1506 getTarget().getTargetOpts().FeaturesAsWritten;
1507
1508 // Grab the target attribute string.
1509 StringRef FeaturesStr = TD->getFeatures();
1510 SmallVector<StringRef, 1> AttrFeatures;
1511 FeaturesStr.split(AttrFeatures, ",");
1512
1513 // Grab the various features and prepend a "+" to turn on the feature to
1514 // the backend and add them to our existing set of features.
1515 for (auto &Feature : AttrFeatures) {
1516 // Go ahead and trim whitespace rather than either erroring or
1517 // accepting it weirdly.
1518 Feature = Feature.trim();
1519
1520 // While we're here iterating check for a different target cpu.
1521 if (Feature.startswith("arch="))
1522 TargetCPU = Feature.split("=").second.trim();
1523 else if (Feature.startswith("tune="))
1524 // We don't support cpu tuning this way currently.
1525 ;
1526 else if (Feature.startswith("fpmath="))
1527 // TODO: Support the fpmath option this way. It will require checking
1528 // overall feature validity for the function with the rest of the
1529 // attributes on the function.
1530 ;
1531 else if (Feature.startswith("no-"))
1532 FnFeatures.push_back("-" + Feature.split("-").second.str());
1533 else
1534 FnFeatures.push_back("+" + Feature.str());
1535 }
Eric Christopher3a98b3c2015-08-27 19:59:34 +00001536 // Now populate the feature map, first with the TargetCPU which is either
1537 // the default or a new one from the target attribute string. Then we'll
1538 // use the passed in features (FeaturesAsWritten) along with the new ones
1539 // from the attribute.
Eric Christopherdec31be2015-08-31 23:19:55 +00001540 getTarget().initFeatureMap(FeatureMap, Diags, TargetCPU, FnFeatures);
Eric Christopher11acf732015-06-12 01:35:52 +00001541
Eric Christopher3a98b3c2015-08-27 19:59:34 +00001542 // Produce the canonical string for this set of features.
1543 std::vector<std::string> Features;
1544 for (llvm::StringMap<bool>::const_iterator it = FeatureMap.begin(),
1545 ie = FeatureMap.end();
1546 it != ie; ++it)
1547 Features.push_back((it->second ? "+" : "-") + it->first().str());
Eric Christopher2249b812015-07-01 00:08:29 +00001548
Eric Christopher3a98b3c2015-08-27 19:59:34 +00001549 // Now add the target-cpu and target-features to the function.
1550 if (TargetCPU != "")
1551 FuncAttrs.addAttribute("target-cpu", TargetCPU);
1552 if (!Features.empty()) {
1553 std::sort(Features.begin(), Features.end());
1554 FuncAttrs.addAttribute(
1555 "target-features",
1556 llvm::join(Features.begin(), Features.end(), ","));
1557 }
1558 } else {
1559 // Otherwise just add the existing target cpu and target features to the
1560 // function.
1561 std::vector<std::string> &Features = getTarget().getTargetOpts().Features;
1562 if (TargetCPU != "")
1563 FuncAttrs.addAttribute("target-cpu", TargetCPU);
1564 if (!Features.empty()) {
1565 std::sort(Features.begin(), Features.end());
1566 FuncAttrs.addAttribute(
1567 "target-features",
1568 llvm::join(Features.begin(), Features.end(), ","));
1569 }
Eric Christopher70c16652015-03-25 23:14:47 +00001570 }
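// Worked example (hypothetical attribute string; feature spellings vary by
// target): for
//   __attribute__((target("arch=haswell,avx2,no-sse4.2"))) void f();
// the loop above sets TargetCPU to "haswell" and appends "+avx2" and
// "-sse4.2" to the features written on the command line, so the function
// would typically end up with something like
//   "target-cpu"="haswell" "target-features"="+avx2,-sse4.2,..."
// once the feature map has been canonicalized, sorted, and joined.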
Bill Wendling985d1c52013-02-15 21:30:01 +00001571 }
1572
Alexey Samsonov153004f2014-09-29 22:08:00 +00001573 ClangToLLVMArgMapping IRFunctionArgs(getContext(), FI);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001574
Daniel Dunbar3668cb22009-02-02 23:43:58 +00001575 QualType RetTy = FI.getReturnType();
Daniel Dunbarb52d0772009-02-03 05:59:18 +00001576 const ABIArgInfo &RetAI = FI.getReturnInfo();
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001577 switch (RetAI.getKind()) {
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00001578 case ABIArgInfo::Extend:
Jakob Stoklund Olesend7bf2932013-05-29 03:57:23 +00001579 if (RetTy->hasSignedIntegerRepresentation())
1580 RetAttrs.addAttribute(llvm::Attribute::SExt);
1581 else if (RetTy->hasUnsignedIntegerRepresentation())
1582 RetAttrs.addAttribute(llvm::Attribute::ZExt);
Jakob Stoklund Olesena3661142013-06-05 03:00:09 +00001583 // FALL THROUGH
Daniel Dunbar67dace892009-02-03 06:17:37 +00001584 case ABIArgInfo::Direct:
Jakob Stoklund Olesena3661142013-06-05 03:00:09 +00001585 if (RetAI.getInReg())
1586 RetAttrs.addAttribute(llvm::Attribute::InReg);
1587 break;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001588 case ABIArgInfo::Ignore:
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00001589 break;
1590
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001591 case ABIArgInfo::InAlloca:
Rafael Espindola06b2b4a2012-07-31 02:44:24 +00001592 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001593 // inalloca and sret disable readnone and readonly
Bill Wendling207f0532012-12-20 19:27:06 +00001594 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
1595 .removeAttribute(llvm::Attribute::ReadNone);
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00001596 break;
Rafael Espindola06b2b4a2012-07-31 02:44:24 +00001597 }
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00001598
Daniel Dunbard3674e62008-09-11 01:48:57 +00001599 case ABIArgInfo::Expand:
David Blaikie83d382b2011-09-23 05:06:16 +00001600 llvm_unreachable("Invalid ABI kind for return argument");
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001601 }
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00001602
Hal Finkela2347ba2014-07-18 15:52:10 +00001603 if (const auto *RefTy = RetTy->getAs<ReferenceType>()) {
1604 QualType PTy = RefTy->getPointeeType();
1605 if (!PTy->isIncompleteType() && PTy->isConstantSizeType())
1606 RetAttrs.addDereferenceableAttr(getContext().getTypeSizeInChars(PTy)
1607 .getQuantity());
1608 else if (getContext().getTargetAddressSpace(PTy) == 0)
1609 RetAttrs.addAttribute(llvm::Attribute::NonNull);
1610 }
Nick Lewycky9b46eb82014-05-28 09:56:42 +00001611
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001612 // Attach return attributes.
1613 if (RetAttrs.hasAttributes()) {
1614 PAL.push_back(llvm::AttributeSet::get(
1615 getLLVMContext(), llvm::AttributeSet::ReturnIndex, RetAttrs));
1616 }
Anton Korobeynikovc8478242009-04-04 00:49:24 +00001617
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001618 // Attach attributes to sret.
1619 if (IRFunctionArgs.hasSRetArg()) {
1620 llvm::AttrBuilder SRETAttrs;
1621 SRETAttrs.addAttribute(llvm::Attribute::StructRet);
1622 if (RetAI.getInReg())
1623 SRETAttrs.addAttribute(llvm::Attribute::InReg);
1624 PAL.push_back(llvm::AttributeSet::get(
1625 getLLVMContext(), IRFunctionArgs.getSRetArgNo() + 1, SRETAttrs));
1626 }
1627
1628 // Attach attributes to inalloca argument.
1629 if (IRFunctionArgs.hasInallocaArg()) {
1630 llvm::AttrBuilder Attrs;
1631 Attrs.addAttribute(llvm::Attribute::InAlloca);
1632 PAL.push_back(llvm::AttributeSet::get(
1633 getLLVMContext(), IRFunctionArgs.getInallocaArgNo() + 1, Attrs));
1634 }
1635
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001636 unsigned ArgNo = 0;
1637 for (CGFunctionInfo::const_arg_iterator I = FI.arg_begin(),
1638 E = FI.arg_end();
1639 I != E; ++I, ++ArgNo) {
1640 QualType ParamType = I->type;
1641 const ABIArgInfo &AI = I->info;
Bill Wendlinga514ebc2012-10-15 20:36:26 +00001642 llvm::AttrBuilder Attrs;
Anton Korobeynikovc8478242009-04-04 00:49:24 +00001643
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001644 // Add attribute for padding argument, if necessary.
1645 if (IRFunctionArgs.hasPaddingArg(ArgNo)) {
Bill Wendling290d9522013-01-27 02:46:53 +00001646 if (AI.getPaddingInReg())
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001647 PAL.push_back(llvm::AttributeSet::get(
1648 getLLVMContext(), IRFunctionArgs.getPaddingArgNo(ArgNo) + 1,
1649 llvm::Attribute::InReg));
Rafael Espindolafad28de2012-10-24 01:59:00 +00001650 }
1651
John McCall39ec71f2010-03-27 00:47:27 +00001652 // 'restrict' -> 'noalias' is done in EmitFunctionProlog when we
1653 // have the corresponding parameter variable. It doesn't make
Daniel Dunbarcb2b3d02011-02-10 18:10:07 +00001654 // sense to do it here because parameters are so messed up.
Daniel Dunbard3674e62008-09-11 01:48:57 +00001655 switch (AI.getKind()) {
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001656 case ABIArgInfo::Extend:
Douglas Gregor6ab2fa82011-05-20 16:38:50 +00001657 if (ParamType->isSignedIntegerOrEnumerationType())
Bill Wendling207f0532012-12-20 19:27:06 +00001658 Attrs.addAttribute(llvm::Attribute::SExt);
Petar Jovanovic1a3f9652015-05-26 21:07:19 +00001659 else if (ParamType->isUnsignedIntegerOrEnumerationType()) {
1660 if (getTypes().getABIInfo().shouldSignExtUnsignedType(ParamType))
1661 Attrs.addAttribute(llvm::Attribute::SExt);
1662 else
1663 Attrs.addAttribute(llvm::Attribute::ZExt);
1664 }
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001665 // FALL THROUGH
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001666 case ABIArgInfo::Direct:
Peter Collingbournef7706832014-12-12 23:41:25 +00001667 if (ArgNo == 0 && FI.isChainCall())
1668 Attrs.addAttribute(llvm::Attribute::Nest);
1669 else if (AI.getInReg())
Bill Wendling207f0532012-12-20 19:27:06 +00001670 Attrs.addAttribute(llvm::Attribute::InReg);
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001671 break;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001672
James Y Knight71608572015-08-21 18:19:06 +00001673 case ABIArgInfo::Indirect: {
Rafael Espindola703c47f2012-10-19 05:04:37 +00001674 if (AI.getInReg())
Bill Wendling207f0532012-12-20 19:27:06 +00001675 Attrs.addAttribute(llvm::Attribute::InReg);
Rafael Espindola703c47f2012-10-19 05:04:37 +00001676
Anders Carlsson20759ad2009-09-16 15:53:40 +00001677 if (AI.getIndirectByVal())
Bill Wendling207f0532012-12-20 19:27:06 +00001678 Attrs.addAttribute(llvm::Attribute::ByVal);
Anders Carlsson20759ad2009-09-16 15:53:40 +00001679
James Y Knight71608572015-08-21 18:19:06 +00001680 unsigned Align = AI.getIndirectAlign();
1681
1682 // In a byval argument, it is important that the required
1683 // alignment of the type is honored, as LLVM might be creating a
1684 // *new* stack object, and needs to know what alignment to give
1685 // it. (Sometimes it can deduce a sensible alignment on its own,
1686 // but not if clang decides it must emit a packed struct, or the
1687 // user specifies increased alignment requirements.)
1688 //
1689 // This is different from indirect *not* byval, where the object
1690 // exists already, and the align attribute is purely
1691 // informative.
1692 if (Align == 0 && AI.getIndirectByVal())
1693 Align = getContext().getTypeAlignInChars(ParamType).getQuantity();
1694
1695 Attrs.addAlignmentAttr(Align);
Bill Wendlinga7912f82012-10-10 07:36:56 +00001696
Daniel Dunbarc2304432009-03-18 19:51:01 +00001697 // byval disables readnone and readonly.
Bill Wendling207f0532012-12-20 19:27:06 +00001698 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
1699 .removeAttribute(llvm::Attribute::ReadNone);
Daniel Dunbard3674e62008-09-11 01:48:57 +00001700 break;
James Y Knight71608572015-08-21 18:19:06 +00001701 }
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001702 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001703 case ABIArgInfo::Expand:
Mike Stump11289f42009-09-09 15:08:12 +00001704 continue;
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001705
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001706 case ABIArgInfo::InAlloca:
1707 // inalloca disables readnone and readonly.
1708 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
1709 .removeAttribute(llvm::Attribute::ReadNone);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001710 continue;
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001711 }
Mike Stump11289f42009-09-09 15:08:12 +00001712
Hal Finkela2347ba2014-07-18 15:52:10 +00001713 if (const auto *RefTy = ParamType->getAs<ReferenceType>()) {
1714 QualType PTy = RefTy->getPointeeType();
1715 if (!PTy->isIncompleteType() && PTy->isConstantSizeType())
1716 Attrs.addDereferenceableAttr(getContext().getTypeSizeInChars(PTy)
1717 .getQuantity());
1718 else if (getContext().getTargetAddressSpace(PTy) == 0)
1719 Attrs.addAttribute(llvm::Attribute::NonNull);
1720 }
Nick Lewycky9b46eb82014-05-28 09:56:42 +00001721
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001722 if (Attrs.hasAttributes()) {
1723 unsigned FirstIRArg, NumIRArgs;
1724 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
1725 for (unsigned i = 0; i < NumIRArgs; i++)
1726 PAL.push_back(llvm::AttributeSet::get(getLLVMContext(),
1727 FirstIRArg + i + 1, Attrs));
1728 }
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001729 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001730 assert(ArgNo == FI.arg_size());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001731
Bill Wendlinga7912f82012-10-10 07:36:56 +00001732 if (FuncAttrs.hasAttributes())
Bill Wendling4f0c0802012-10-15 07:31:59 +00001733 PAL.push_back(llvm::
Bill Wendling290d9522013-01-27 02:46:53 +00001734 AttributeSet::get(getLLVMContext(),
1735 llvm::AttributeSet::FunctionIndex,
1736 FuncAttrs));
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001737}
1738
John McCalla738c252011-03-09 04:27:21 +00001739/// An argument came in as a promoted argument; demote it back to its
1740/// declared type.
1741static llvm::Value *emitArgumentDemotion(CodeGenFunction &CGF,
1742 const VarDecl *var,
1743 llvm::Value *value) {
Chris Lattner2192fe52011-07-18 04:24:23 +00001744 llvm::Type *varType = CGF.ConvertType(var->getType());
John McCalla738c252011-03-09 04:27:21 +00001745
1746 // This can happen with promotions that actually don't change the
1747 // underlying type, like the enum promotions.
1748 if (value->getType() == varType) return value;
1749
1750 assert((varType->isIntegerTy() || varType->isFloatingPointTy())
1751 && "unexpected promotion type");
1752
1753 if (isa<llvm::IntegerType>(varType))
1754 return CGF.Builder.CreateTrunc(value, varType, "arg.unpromote");
1755
1756 return CGF.Builder.CreateFPCast(value, varType, "arg.unpromote");
1757}
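// Illustrative case (assuming char is i8 on the target): a K&R-style
// definition such as
//   void f(c) char c; { }
// passes 'c' as a promoted int, so the prolog receives an i32 and the
// demotion emits
//   %arg.unpromote = trunc i32 %c to i8
// while float parameters promoted to double take the CreateFPCast path.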
1758
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001759/// Returns the attribute (either parameter attribute, or function
1760/// attribute), which declares argument ArgNo to be non-null.
1761static const NonNullAttr *getNonNullAttr(const Decl *FD, const ParmVarDecl *PVD,
1762 QualType ArgType, unsigned ArgNo) {
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001763 // FIXME: __attribute__((nonnull)) can also be applied to:
1764 // - references to pointers, where the pointee is known to be
1765 // nonnull (apparently a Clang extension)
1766 // - transparent unions containing pointers
1767 // In the former case, LLVM IR cannot represent the constraint. In
1768 // the latter case, we have no guarantee that the transparent union
1769 // is in fact passed as a pointer.
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001770 if (!ArgType->isAnyPointerType() && !ArgType->isBlockPointerType())
1771 return nullptr;
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001772 // First, check attribute on parameter itself.
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001773 if (PVD) {
1774 if (auto ParmNNAttr = PVD->getAttr<NonNullAttr>())
1775 return ParmNNAttr;
1776 }
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001777 // Check function attributes.
1778 if (!FD)
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001779 return nullptr;
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001780 for (const auto *NNAttr : FD->specific_attrs<NonNullAttr>()) {
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001781 if (NNAttr->isNonNull(ArgNo))
1782 return NNAttr;
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001783 }
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001784 return nullptr;
Alexey Samsonov9fc9bf82014-08-28 00:53:20 +00001785}
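// Illustrative usage: both spellings below make getNonNullAttr return a
// NonNullAttr for the pointer parameter, which EmitFunctionProlog then maps
// to the LLVM 'nonnull' parameter attribute:
//   void f(int *p) __attribute__((nonnull(1)));   // function-level, 1-based index
//   void g(__attribute__((nonnull)) int *p);      // on the parameter itself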
1786
Daniel Dunbard931a872009-02-02 22:03:45 +00001787void CodeGenFunction::EmitFunctionProlog(const CGFunctionInfo &FI,
1788 llvm::Function *Fn,
Daniel Dunbar613855c2008-09-09 23:27:19 +00001789 const FunctionArgList &Args) {
Hans Wennborgd71907d2014-09-04 22:16:33 +00001790 if (CurCodeDecl && CurCodeDecl->hasAttr<NakedAttr>())
1791 // Naked functions don't have prologues.
1792 return;
1793
John McCallcaa19452009-07-28 01:00:58 +00001794 // If this is an implicit-return-zero function, go ahead and
1795 // initialize the return value. TODO: it might be nice to have
1796 // a more general mechanism for this that didn't require synthesized
1797 // return statements.
John McCalldec348f72013-05-03 07:33:41 +00001798 if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(CurCodeDecl)) {
John McCallcaa19452009-07-28 01:00:58 +00001799 if (FD->hasImplicitReturnZero()) {
Alp Toker314cc812014-01-25 16:55:45 +00001800 QualType RetTy = FD->getReturnType().getUnqualifiedType();
Chris Lattner2192fe52011-07-18 04:24:23 +00001801 llvm::Type* LLVMTy = CGM.getTypes().ConvertType(RetTy);
Owen Anderson0b75f232009-07-31 20:28:54 +00001802 llvm::Constant* Zero = llvm::Constant::getNullValue(LLVMTy);
John McCallcaa19452009-07-28 01:00:58 +00001803 Builder.CreateStore(Zero, ReturnValue);
1804 }
1805 }
1806
Mike Stump18bb9282009-05-16 07:57:57 +00001807 // FIXME: We no longer need the types from FunctionArgList; lift up and
1808 // simplify.
Daniel Dunbar5a0acdc92009-02-03 06:02:10 +00001809
Alexey Samsonov153004f2014-09-29 22:08:00 +00001810 ClangToLLVMArgMapping IRFunctionArgs(CGM.getContext(), FI);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001811 // Flattened function arguments.
1812 SmallVector<llvm::Argument *, 16> FnArgs;
1813 FnArgs.reserve(IRFunctionArgs.totalIRArgs());
1814 for (auto &Arg : Fn->args()) {
1815 FnArgs.push_back(&Arg);
1816 }
1817 assert(FnArgs.size() == IRFunctionArgs.totalIRArgs());
Mike Stump11289f42009-09-09 15:08:12 +00001818
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001819 // If we're using inalloca, all the memory arguments are GEPs off of the last
1820 // parameter, which is a pointer to the complete memory area.
Craig Topper8a13c412014-05-21 05:09:00 +00001821 llvm::Value *ArgStruct = nullptr;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001822 if (IRFunctionArgs.hasInallocaArg()) {
1823 ArgStruct = FnArgs[IRFunctionArgs.getInallocaArgNo()];
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001824 assert(ArgStruct->getType() == FI.getArgStruct()->getPointerTo());
1825 }
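// Illustrative shape (hypothetical 32-bit Windows-style lowering): a callee
// whose by-value arguments are not trivially copyable, e.g.
//   void f(NonTrivial a, int b);
// receives its memory arguments packed behind a single inalloca pointer such
// as <{ %struct.NonTrivial, i32 }>* %argmem, and each such parameter below is
// recovered with a struct GEP off ArgStruct rather than from its own IR
// argument.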
1826
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001827 // Name the struct return parameter.
1828 if (IRFunctionArgs.hasSRetArg()) {
1829 auto AI = FnArgs[IRFunctionArgs.getSRetArgNo()];
Daniel Dunbar613855c2008-09-09 23:27:19 +00001830 AI->setName("agg.result");
Reid Kleckner37abaca2014-05-09 22:46:15 +00001831 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(), AI->getArgNo() + 1,
Bill Wendlingce2f9c52013-01-23 06:15:10 +00001832 llvm::Attribute::NoAlias));
Daniel Dunbar613855c2008-09-09 23:27:19 +00001833 }
Mike Stump11289f42009-09-09 15:08:12 +00001834
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001835 // Track if we received the parameter as a pointer (indirect, byval, or
1836 // inalloca). If we already have a pointer, EmitParmDecl doesn't need to copy it
1837 // into a local alloca for us.
1838 enum ValOrPointer { HaveValue = 0, HavePointer = 1 };
Reid Kleckner8ae16272014-02-01 00:23:22 +00001839 typedef llvm::PointerIntPair<llvm::Value *, 1> ValueAndIsPtr;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001840 SmallVector<ValueAndIsPtr, 16> ArgVals;
1841 ArgVals.reserve(Args.size());
1842
Reid Kleckner739756c2013-12-04 19:23:12 +00001843 // Create a pointer value for every parameter declaration. This usually
1844 // entails copying one or more LLVM IR arguments into an alloca. Don't push
1845 // any cleanups or do anything that might unwind. We do that separately, so
1846 // we can push the cleanups in the correct order for the ABI.
Daniel Dunbara45bdbb2009-02-04 21:17:21 +00001847 assert(FI.arg_size() == Args.size() &&
1848 "Mismatch between function signature & arguments.");
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001849 unsigned ArgNo = 0;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00001850 CGFunctionInfo::const_arg_iterator info_it = FI.arg_begin();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001851 for (FunctionArgList::const_iterator i = Args.begin(), e = Args.end();
Devang Patel68a15252011-03-03 20:13:15 +00001852 i != e; ++i, ++info_it, ++ArgNo) {
John McCalla738c252011-03-09 04:27:21 +00001853 const VarDecl *Arg = *i;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00001854 QualType Ty = info_it->type;
1855 const ABIArgInfo &ArgI = info_it->info;
Daniel Dunbard3674e62008-09-11 01:48:57 +00001856
John McCalla738c252011-03-09 04:27:21 +00001857 bool isPromoted =
1858 isa<ParmVarDecl>(Arg) && cast<ParmVarDecl>(Arg)->isKNRPromoted();
1859
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001860 unsigned FirstIRArg, NumIRArgs;
1861 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
Rafael Espindolafad28de2012-10-24 01:59:00 +00001862
Daniel Dunbard3674e62008-09-11 01:48:57 +00001863 switch (ArgI.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001864 case ABIArgInfo::InAlloca: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001865 assert(NumIRArgs == 0);
David Blaikie1ed728c2015-04-05 22:45:47 +00001866 llvm::Value *V =
1867 Builder.CreateStructGEP(FI.getArgStruct(), ArgStruct,
1868 ArgI.getInAllocaFieldIndex(), Arg->getName());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001869 ArgVals.push_back(ValueAndIsPtr(V, HavePointer));
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001870 break;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001871 }
1872
Daniel Dunbar747865a2009-02-05 09:16:39 +00001873 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001874 assert(NumIRArgs == 1);
1875 llvm::Value *V = FnArgs[FirstIRArg];
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00001876
John McCall47fb9502013-03-07 21:37:08 +00001877 if (!hasScalarEvaluationKind(Ty)) {
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00001878 // Aggregates and complex variables are accessed by reference. All we
1879 // need to do is realign the value, if requested
1880 if (ArgI.getIndirectRealign()) {
1881 llvm::Value *AlignedTemp = CreateMemTemp(Ty, "coerce");
1882
1883 // Copy from the incoming argument pointer to the temporary with the
1884 // appropriate alignment.
1885 //
1886 // FIXME: We should have a common utility for generating an aggregate
1887 // copy.
Chris Lattner2192fe52011-07-18 04:24:23 +00001888 llvm::Type *I8PtrTy = Builder.getInt8PtrTy();
Ken Dyck705ba072011-01-19 01:58:38 +00001889 CharUnits Size = getContext().getTypeSizeInChars(Ty);
NAKAMURA Takumidd634362011-03-10 14:02:21 +00001890 llvm::Value *Dst = Builder.CreateBitCast(AlignedTemp, I8PtrTy);
1891 llvm::Value *Src = Builder.CreateBitCast(V, I8PtrTy);
1892 Builder.CreateMemCpy(Dst,
1893 Src,
Ken Dyck705ba072011-01-19 01:58:38 +00001894 llvm::ConstantInt::get(IntPtrTy,
1895 Size.getQuantity()),
Benjamin Krameracc6b4e2010-12-30 00:13:21 +00001896 ArgI.getIndirectAlign(),
1897 false);
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00001898 V = AlignedTemp;
1899 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001900 ArgVals.push_back(ValueAndIsPtr(V, HavePointer));
Daniel Dunbar747865a2009-02-05 09:16:39 +00001901 } else {
1902 // Load scalar value from indirect argument.
David Majnemere1544562015-04-24 01:25:05 +00001903 V = EmitLoadOfScalar(V, false, ArgI.getIndirectAlign(), Ty,
Nick Lewycky2d84e842013-10-02 02:29:49 +00001904 Arg->getLocStart());
John McCalla738c252011-03-09 04:27:21 +00001905
1906 if (isPromoted)
1907 V = emitArgumentDemotion(*this, Arg, V);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001908 ArgVals.push_back(ValueAndIsPtr(V, HaveValue));
Daniel Dunbar747865a2009-02-05 09:16:39 +00001909 }
Daniel Dunbar747865a2009-02-05 09:16:39 +00001910 break;
1911 }
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00001912
1913 case ABIArgInfo::Extend:
Daniel Dunbar67dace892009-02-03 06:17:37 +00001914 case ABIArgInfo::Direct: {
Akira Hatanaka18334dd2012-01-09 19:08:06 +00001915
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001916 // If we have the trivial case, handle it with no muss and fuss.
1917 if (!isa<llvm::StructType>(ArgI.getCoerceToType()) &&
Chris Lattner8a2f3c72010-07-30 04:02:24 +00001918 ArgI.getCoerceToType() == ConvertType(Ty) &&
1919 ArgI.getDirectOffset() == 0) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001920 assert(NumIRArgs == 1);
1921 auto AI = FnArgs[FirstIRArg];
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001922 llvm::Value *V = AI;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001923
Hal Finkel48d53e22014-07-19 01:41:07 +00001924 if (const ParmVarDecl *PVD = dyn_cast<ParmVarDecl>(Arg)) {
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00001925 if (getNonNullAttr(CurCodeDecl, PVD, PVD->getType(),
1926 PVD->getFunctionScopeIndex()))
Hal Finkel82504f02014-07-11 17:35:21 +00001927 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1928 AI->getArgNo() + 1,
1929 llvm::Attribute::NonNull));
1930
Hal Finkel48d53e22014-07-19 01:41:07 +00001931 QualType OTy = PVD->getOriginalType();
1932 if (const auto *ArrTy =
1933 getContext().getAsConstantArrayType(OTy)) {
1934 // A C99 array parameter declaration with the static keyword also
1935 // indicates dereferenceability, and if the size is constant we can
1936 // use the dereferenceable attribute (which requires the size in
1937 // bytes).
Hal Finkel16e394a2014-07-19 02:13:40 +00001938 if (ArrTy->getSizeModifier() == ArrayType::Static) {
Hal Finkel48d53e22014-07-19 01:41:07 +00001939 QualType ETy = ArrTy->getElementType();
1940 uint64_t ArrSize = ArrTy->getSize().getZExtValue();
1941 if (!ETy->isIncompleteType() && ETy->isConstantSizeType() &&
1942 ArrSize) {
1943 llvm::AttrBuilder Attrs;
1944 Attrs.addDereferenceableAttr(
1945 getContext().getTypeSizeInChars(ETy).getQuantity()*ArrSize);
1946 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1947 AI->getArgNo() + 1, Attrs));
1948 } else if (getContext().getTargetAddressSpace(ETy) == 0) {
1949 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1950 AI->getArgNo() + 1,
1951 llvm::Attribute::NonNull));
1952 }
1953 }
1954 } else if (const auto *ArrTy =
1955 getContext().getAsVariableArrayType(OTy)) {
1956 // For C99 VLAs with the static keyword, we don't know the size so
1957 // we can't use the dereferenceable attribute, but in addrspace(0)
1958 // we know that it must be nonnull.
1959 if (ArrTy->getSizeModifier() == VariableArrayType::Static &&
1960 !getContext().getTargetAddressSpace(ArrTy->getElementType()))
1961 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1962 AI->getArgNo() + 1,
1963 llvm::Attribute::NonNull));
1964 }
Hal Finkel1b0d24e2014-10-02 21:21:25 +00001965
1966 const auto *AVAttr = PVD->getAttr<AlignValueAttr>();
1967 if (!AVAttr)
1968 if (const auto *TOTy = dyn_cast<TypedefType>(OTy))
1969 AVAttr = TOTy->getDecl()->getAttr<AlignValueAttr>();
1970 if (AVAttr) {
1971 llvm::Value *AlignmentValue =
1972 EmitScalarExpr(AVAttr->getAlignment());
1973 llvm::ConstantInt *AlignmentCI =
1974 cast<llvm::ConstantInt>(AlignmentValue);
1975 unsigned Alignment =
1976 std::min((unsigned) AlignmentCI->getZExtValue(),
1977 +llvm::Value::MaximumAlignment);
1978
1979 llvm::AttrBuilder Attrs;
1980 Attrs.addAlignmentAttr(Alignment);
1981 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1982 AI->getArgNo() + 1, Attrs));
1983 }
Hal Finkel48d53e22014-07-19 01:41:07 +00001984 }
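// Illustrative effect of the two branches above (assuming 4-byte int and
// address space 0):
//   void f(int a[static 4]);        // pointer marked dereferenceable(16)
//   void g(int n, int a[static n]); // VLA bound unknown: marked nonnull only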
1985
Bill Wendling507c3512012-10-16 05:23:44 +00001986 if (Arg->getType().isRestrictQualified())
Bill Wendlingce2f9c52013-01-23 06:15:10 +00001987 AI->addAttr(llvm::AttributeSet::get(getLLVMContext(),
1988 AI->getArgNo() + 1,
1989 llvm::Attribute::NoAlias));
John McCall39ec71f2010-03-27 00:47:27 +00001990
Chris Lattner7369c142011-07-20 06:29:00 +00001991 // Ensure the argument is the correct type.
1992 if (V->getType() != ArgI.getCoerceToType())
1993 V = Builder.CreateBitCast(V, ArgI.getCoerceToType());
1994
John McCalla738c252011-03-09 04:27:21 +00001995 if (isPromoted)
1996 V = emitArgumentDemotion(*this, Arg, V);
Rafael Espindola8778c282012-11-29 16:09:03 +00001997
Nick Lewycky5fa40c32013-10-01 21:51:38 +00001998 if (const CXXMethodDecl *MD =
1999 dyn_cast_or_null<CXXMethodDecl>(CurCodeDecl)) {
Timur Iskhodzhanov88fd4392013-08-21 06:25:03 +00002000 if (MD->isVirtual() && Arg == CXXABIThisDecl)
Nick Lewycky5fa40c32013-10-01 21:51:38 +00002001 V = CGM.getCXXABI().
2002 adjustThisParameterInVirtualFunctionPrologue(*this, CurGD, V);
Timur Iskhodzhanov88fd4392013-08-21 06:25:03 +00002003 }
2004
Rafael Espindola8778c282012-11-29 16:09:03 +00002005 // Because of merging of function types from multiple decls it is
2006 // possible for the type of an argument to not match the corresponding
2007 // type in the function type. Since we are codegening the callee
2008      // here, add a cast to the argument type.
2009 llvm::Type *LTy = ConvertType(Arg->getType());
2010 if (V->getType() != LTy)
2011 V = Builder.CreateBitCast(V, LTy);
2012
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002013 ArgVals.push_back(ValueAndIsPtr(V, HaveValue));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002014 break;
Daniel Dunbard5f1f552009-02-10 00:06:49 +00002015 }
Mike Stump11289f42009-09-09 15:08:12 +00002016
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002017 llvm::AllocaInst *Alloca = CreateMemTemp(Ty, Arg->getName());
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002018
Chris Lattnerff941a62010-07-28 18:24:28 +00002019 // The alignment we need to use is the max of the requested alignment for
2020    // the argument and the alignment required by our access code below.
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002021 unsigned AlignmentToUse =
Micah Villmowdd31ca12012-10-08 16:25:52 +00002022 CGM.getDataLayout().getABITypeAlignment(ArgI.getCoerceToType());
Chris Lattnerff941a62010-07-28 18:24:28 +00002023 AlignmentToUse = std::max(AlignmentToUse,
2024 (unsigned)getContext().getDeclAlign(Arg).getQuantity());
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002025
Chris Lattnerff941a62010-07-28 18:24:28 +00002026 Alloca->setAlignment(AlignmentToUse);
Chris Lattnerc401de92010-07-05 20:21:00 +00002027 llvm::Value *V = Alloca;
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002028 llvm::Value *Ptr = V; // Pointer to store into.
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002029 CharUnits PtrAlign = CharUnits::fromQuantity(AlignmentToUse);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002030
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002031 // If the value is offset in memory, apply the offset now.
2032 if (unsigned Offs = ArgI.getDirectOffset()) {
2033 Ptr = Builder.CreateBitCast(Ptr, Builder.getInt8PtrTy());
David Blaikie5e259a82015-04-03 22:54:16 +00002034 Ptr = Builder.CreateConstGEP1_32(Builder.getInt8Ty(), Ptr, Offs);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002035 Ptr = Builder.CreateBitCast(Ptr,
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002036 llvm::PointerType::getUnqual(ArgI.getCoerceToType()));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002037 PtrAlign = PtrAlign.alignmentAtOffset(CharUnits::fromQuantity(Offs));
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002038 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002039
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00002040 // Fast-isel and the optimizer generally like scalar values better than
2041 // FCAs, so we flatten them if this is safe to do for this argument.
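      // For example (illustrative): an argument coerced to the IR struct
      // { i32, i32 } arrives as two separate scalar IR arguments; the code
      // below stores each piece into the matching field of the alloca (or a
      // temporary) instead of building a first-class aggregate value.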
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002042 llvm::StructType *STy = dyn_cast<llvm::StructType>(ArgI.getCoerceToType());
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00002043 if (ArgI.isDirect() && ArgI.getCanBeFlattened() && STy &&
2044 STy->getNumElements() > 1) {
Micah Villmowdd31ca12012-10-08 16:25:52 +00002045 uint64_t SrcSize = CGM.getDataLayout().getTypeAllocSize(STy);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002046 llvm::Type *DstTy =
2047 cast<llvm::PointerType>(Ptr->getType())->getElementType();
Micah Villmowdd31ca12012-10-08 16:25:52 +00002048 uint64_t DstSize = CGM.getDataLayout().getTypeAllocSize(DstTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002049
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002050 if (SrcSize <= DstSize) {
2051 Ptr = Builder.CreateBitCast(Ptr, llvm::PointerType::getUnqual(STy));
2052
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002053 assert(STy->getNumElements() == NumIRArgs);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002054 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002055 auto AI = FnArgs[FirstIRArg + i];
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002056 AI->setName(Arg->getName() + ".coerce" + Twine(i));
David Blaikie17ea2662015-04-04 21:07:17 +00002057 llvm::Value *EltPtr = Builder.CreateConstGEP2_32(STy, Ptr, 0, i);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002058 Builder.CreateStore(AI, EltPtr);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002059 }
2060 } else {
2061 llvm::AllocaInst *TempAlloca =
2062 CreateTempAlloca(ArgI.getCoerceToType(), "coerce");
2063 TempAlloca->setAlignment(AlignmentToUse);
2064 llvm::Value *TempV = TempAlloca;
2065
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002066 assert(STy->getNumElements() == NumIRArgs);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002067 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002068 auto AI = FnArgs[FirstIRArg + i];
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002069 AI->setName(Arg->getName() + ".coerce" + Twine(i));
David Blaikie17ea2662015-04-04 21:07:17 +00002070 llvm::Value *EltPtr =
2071 Builder.CreateConstGEP2_32(ArgI.getCoerceToType(), TempV, 0, i);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002072 Builder.CreateStore(AI, EltPtr);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002073 }
2074
2075 Builder.CreateMemCpy(Ptr, TempV, DstSize, AlignmentToUse);
Chris Lattner15ec3612010-06-29 00:06:42 +00002076 }
2077 } else {
2078 // Simple case, just do a coerced store of the argument into the alloca.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002079 assert(NumIRArgs == 1);
2080 auto AI = FnArgs[FirstIRArg];
Chris Lattner9e748e92010-06-29 00:14:52 +00002081 AI->setName(Arg->getName() + ".coerce");
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002082 CreateCoercedStore(AI, Ptr, /*DestIsVolatile=*/false, PtrAlign, *this);
Chris Lattner15ec3612010-06-29 00:06:42 +00002083 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002084
2085
Daniel Dunbar2f219b02009-02-03 19:12:28 +00002086 // Match to what EmitParmDecl is expecting for this type.
John McCall47fb9502013-03-07 21:37:08 +00002087 if (CodeGenFunction::hasScalarEvaluationKind(Ty)) {
Nick Lewycky2d84e842013-10-02 02:29:49 +00002088 V = EmitLoadOfScalar(V, false, AlignmentToUse, Ty, Arg->getLocStart());
John McCalla738c252011-03-09 04:27:21 +00002089 if (isPromoted)
2090 V = emitArgumentDemotion(*this, Arg, V);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002091 ArgVals.push_back(ValueAndIsPtr(V, HaveValue));
2092 } else {
2093 ArgVals.push_back(ValueAndIsPtr(V, HavePointer));
Daniel Dunbar6e3b7df2009-02-04 07:22:24 +00002094 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002095 break;
Daniel Dunbar2f219b02009-02-03 19:12:28 +00002096 }
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002097
2098 case ABIArgInfo::Expand: {
2099 // If this structure was expanded into multiple arguments then
2100 // we need to create a temporary and reconstruct it from the
2101 // arguments.
Eli Friedman3d9f47f2011-11-03 21:39:02 +00002102 llvm::AllocaInst *Alloca = CreateMemTemp(Ty);
Eli Friedmana0544d62011-12-03 04:14:32 +00002103 CharUnits Align = getContext().getDeclAlign(Arg);
2104 Alloca->setAlignment(Align.getQuantity());
2105 LValue LV = MakeAddrLValue(Alloca, Ty, Align);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002106 ArgVals.push_back(ValueAndIsPtr(Alloca, HavePointer));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002107
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002108 auto FnArgIter = FnArgs.begin() + FirstIRArg;
2109 ExpandTypeFromArgs(Ty, LV, FnArgIter);
2110 assert(FnArgIter == FnArgs.begin() + FirstIRArg + NumIRArgs);
2111 for (unsigned i = 0, e = NumIRArgs; i != e; ++i) {
2112 auto AI = FnArgs[FirstIRArg + i];
2113 AI->setName(Arg->getName() + "." + Twine(i));
2114 }
2115 break;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002116 }
2117
2118 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002119 assert(NumIRArgs == 0);
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002120 // Initialize the local variable appropriately.
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002121 if (!hasScalarEvaluationKind(Ty)) {
2122 ArgVals.push_back(ValueAndIsPtr(CreateMemTemp(Ty), HavePointer));
2123 } else {
2124 llvm::Value *U = llvm::UndefValue::get(ConvertType(Arg->getType()));
2125 ArgVals.push_back(ValueAndIsPtr(U, HaveValue));
2126 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002127 break;
Daniel Dunbard3674e62008-09-11 01:48:57 +00002128 }
Daniel Dunbar613855c2008-09-09 23:27:19 +00002129 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002130
Reid Kleckner739756c2013-12-04 19:23:12 +00002131 if (getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
2132 for (int I = Args.size() - 1; I >= 0; --I)
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002133 EmitParmDecl(*Args[I], ArgVals[I].getPointer(), ArgVals[I].getInt(),
2134 I + 1);
Reid Kleckner739756c2013-12-04 19:23:12 +00002135 } else {
2136 for (unsigned I = 0, E = Args.size(); I != E; ++I)
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002137 EmitParmDecl(*Args[I], ArgVals[I].getPointer(), ArgVals[I].getInt(),
2138 I + 1);
Reid Kleckner739756c2013-12-04 19:23:12 +00002139 }
Daniel Dunbar613855c2008-09-09 23:27:19 +00002140}
2141
John McCallffa2c1a2012-01-29 07:46:59 +00002142static void eraseUnusedBitCasts(llvm::Instruction *insn) {
2143 while (insn->use_empty()) {
2144 llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(insn);
2145 if (!bitcast) return;
2146
2147 // This is "safe" because we would have used a ConstantExpr otherwise.
2148 insn = cast<llvm::Instruction>(bitcast->getOperand(0));
2149 bitcast->eraseFromParent();
2150 }
2151}
2152
John McCall31168b02011-06-15 23:02:42 +00002153/// Try to emit a fused autorelease of a return result.
2154static llvm::Value *tryEmitFusedAutoreleaseOfResult(CodeGenFunction &CGF,
2155 llvm::Value *result) {
2156  // We must be immediately following the cast.
2157 llvm::BasicBlock *BB = CGF.Builder.GetInsertBlock();
Craig Topper8a13c412014-05-21 05:09:00 +00002158 if (BB->empty()) return nullptr;
2159 if (&BB->back() != result) return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002160
Chris Lattner2192fe52011-07-18 04:24:23 +00002161 llvm::Type *resultType = result->getType();
John McCall31168b02011-06-15 23:02:42 +00002162
2163 // result is in a BasicBlock and is therefore an Instruction.
2164 llvm::Instruction *generator = cast<llvm::Instruction>(result);
2165
Chris Lattner0e62c1c2011-07-23 10:55:15 +00002166 SmallVector<llvm::Instruction*,4> insnsToKill;
John McCall31168b02011-06-15 23:02:42 +00002167
2168 // Look for:
2169 // %generator = bitcast %type1* %generator2 to %type2*
2170 while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(generator)) {
2171 // We would have emitted this as a constant if the operand weren't
2172 // an Instruction.
2173 generator = cast<llvm::Instruction>(bitcast->getOperand(0));
2174
2175 // Require the generator to be immediately followed by the cast.
2176 if (generator->getNextNode() != bitcast)
Craig Topper8a13c412014-05-21 05:09:00 +00002177 return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002178
2179 insnsToKill.push_back(bitcast);
2180 }
2181
2182 // Look for:
2183 // %generator = call i8* @objc_retain(i8* %originalResult)
2184 // or
2185 // %generator = call i8* @objc_retainAutoreleasedReturnValue(i8* %originalResult)
2186 llvm::CallInst *call = dyn_cast<llvm::CallInst>(generator);
Craig Topper8a13c412014-05-21 05:09:00 +00002187 if (!call) return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002188
2189 bool doRetainAutorelease;
2190
2191 if (call->getCalledValue() == CGF.CGM.getARCEntrypoints().objc_retain) {
2192 doRetainAutorelease = true;
2193 } else if (call->getCalledValue() == CGF.CGM.getARCEntrypoints()
2194 .objc_retainAutoreleasedReturnValue) {
2195 doRetainAutorelease = false;
2196
John McCallcfa4e9b2012-09-07 23:30:50 +00002197    // If we emitted an assembly marker for this call (in which case the
2198    // ARCEntrypoints field should have been set), go looking
2199 // for that call. If we can't find it, we can't do this
2200 // optimization. But it should always be the immediately previous
2201 // instruction, unless we needed bitcasts around the call.
2202 if (CGF.CGM.getARCEntrypoints().retainAutoreleasedReturnValueMarker) {
2203 llvm::Instruction *prev = call->getPrevNode();
2204 assert(prev);
2205 if (isa<llvm::BitCastInst>(prev)) {
2206 prev = prev->getPrevNode();
2207 assert(prev);
2208 }
2209 assert(isa<llvm::CallInst>(prev));
2210 assert(cast<llvm::CallInst>(prev)->getCalledValue() ==
2211 CGF.CGM.getARCEntrypoints().retainAutoreleasedReturnValueMarker);
2212 insnsToKill.push_back(prev);
2213 }
John McCall31168b02011-06-15 23:02:42 +00002214 } else {
Craig Topper8a13c412014-05-21 05:09:00 +00002215 return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002216 }
2217
2218 result = call->getArgOperand(0);
2219 insnsToKill.push_back(call);
2220
2221 // Keep killing bitcasts, for sanity. Note that we no longer care
2222 // about precise ordering as long as there's exactly one use.
2223 while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(result)) {
2224 if (!bitcast->hasOneUse()) break;
2225 insnsToKill.push_back(bitcast);
2226 result = bitcast->getOperand(0);
2227 }
2228
2229 // Delete all the unnecessary instructions, from latest to earliest.
Chris Lattner0e62c1c2011-07-23 10:55:15 +00002230 for (SmallVectorImpl<llvm::Instruction*>::iterator
John McCall31168b02011-06-15 23:02:42 +00002231 i = insnsToKill.begin(), e = insnsToKill.end(); i != e; ++i)
2232 (*i)->eraseFromParent();
2233
2234 // Do the fused retain/autorelease if we were asked to.
2235 if (doRetainAutorelease)
2236 result = CGF.EmitARCRetainAutoreleaseReturnValue(result);
2237
2238 // Cast back to the result type.
2239 return CGF.Builder.CreateBitCast(result, resultType);
2240}
2241
John McCallffa2c1a2012-01-29 07:46:59 +00002242/// If this is a +1 of the value of an immutable 'self', remove it.
2243static llvm::Value *tryRemoveRetainOfSelf(CodeGenFunction &CGF,
2244 llvm::Value *result) {
2245 // This is only applicable to a method with an immutable 'self'.
John McCallff755cd2012-07-31 00:33:55 +00002246 const ObjCMethodDecl *method =
2247 dyn_cast_or_null<ObjCMethodDecl>(CGF.CurCodeDecl);
Craig Topper8a13c412014-05-21 05:09:00 +00002248 if (!method) return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002249 const VarDecl *self = method->getSelfDecl();
Craig Topper8a13c412014-05-21 05:09:00 +00002250 if (!self->getType().isConstQualified()) return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002251
2252 // Look for a retain call.
2253 llvm::CallInst *retainCall =
2254 dyn_cast<llvm::CallInst>(result->stripPointerCasts());
2255 if (!retainCall ||
2256 retainCall->getCalledValue() != CGF.CGM.getARCEntrypoints().objc_retain)
Craig Topper8a13c412014-05-21 05:09:00 +00002257 return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002258
2259 // Look for an ordinary load of 'self'.
2260 llvm::Value *retainedValue = retainCall->getArgOperand(0);
2261 llvm::LoadInst *load =
2262 dyn_cast<llvm::LoadInst>(retainedValue->stripPointerCasts());
2263 if (!load || load->isAtomic() || load->isVolatile() ||
2264 load->getPointerOperand() != CGF.GetAddrOfLocalVar(self))
Craig Topper8a13c412014-05-21 05:09:00 +00002265 return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002266
2267 // Okay! Burn it all down. This relies for correctness on the
2268 // assumption that the retain is emitted as part of the return and
2269 // that thereafter everything is used "linearly".
2270 llvm::Type *resultType = result->getType();
2271 eraseUnusedBitCasts(cast<llvm::Instruction>(result));
2272 assert(retainCall->use_empty());
2273 retainCall->eraseFromParent();
2274 eraseUnusedBitCasts(cast<llvm::Instruction>(retainedValue));
2275
2276 return CGF.Builder.CreateBitCast(load, resultType);
2277}
2278
John McCall31168b02011-06-15 23:02:42 +00002279/// Emit an ARC autorelease of the result of a function.
John McCallffa2c1a2012-01-29 07:46:59 +00002280///
2281/// \return the value to actually return from the function
John McCall31168b02011-06-15 23:02:42 +00002282static llvm::Value *emitAutoreleaseOfResult(CodeGenFunction &CGF,
2283 llvm::Value *result) {
John McCallffa2c1a2012-01-29 07:46:59 +00002284 // If we're returning 'self', kill the initial retain. This is a
2285 // heuristic attempt to "encourage correctness" in the really unfortunate
2286 // case where we have a return of self during a dealloc and we desperately
2287 // need to avoid the possible autorelease.
2288 if (llvm::Value *self = tryRemoveRetainOfSelf(CGF, result))
2289 return self;
2290
John McCall31168b02011-06-15 23:02:42 +00002291 // At -O0, try to emit a fused retain/autorelease.
2292 if (CGF.shouldUseFusedARCCalls())
2293 if (llvm::Value *fused = tryEmitFusedAutoreleaseOfResult(CGF, result))
2294 return fused;
2295
2296 return CGF.EmitARCAutoreleaseReturnValue(result);
2297}
2298
John McCall6e1c0122012-01-29 02:35:02 +00002299/// Heuristically search for a dominating store to the return-value slot.
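/// A minimal sketch of the pattern this looks for (illustrative only):
///   store i32 %x, i32* %ReturnValue    ; candidate store
///   ...                                ; only single-predecessor blocks,
///                                      ; or nothing at all, in between
///   <insertion point where the ret will be emitted>
/// Lifetime markers immediately before the insertion point are skipped.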
2300static llvm::StoreInst *findDominatingStoreToReturnValue(CodeGenFunction &CGF) {
2301 // If there are multiple uses of the return-value slot, just check
2302 // for something immediately preceding the IP. Sometimes this can
2303 // happen with how we generate implicit-returns; it can also happen
2304 // with noreturn cleanups.
2305 if (!CGF.ReturnValue->hasOneUse()) {
2306 llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock();
Craig Topper8a13c412014-05-21 05:09:00 +00002307 if (IP->empty()) return nullptr;
David Majnemerdc012fa2015-04-22 21:38:15 +00002308 llvm::Instruction *I = &IP->back();
2309
2310 // Skip lifetime markers
2311 for (llvm::BasicBlock::reverse_iterator II = IP->rbegin(),
2312 IE = IP->rend();
2313 II != IE; ++II) {
2314 if (llvm::IntrinsicInst *Intrinsic =
2315 dyn_cast<llvm::IntrinsicInst>(&*II)) {
2316 if (Intrinsic->getIntrinsicID() == llvm::Intrinsic::lifetime_end) {
2317 const llvm::Value *CastAddr = Intrinsic->getArgOperand(1);
2318 ++II;
Alexey Samsonov10544202015-06-12 21:05:32 +00002319 if (II == IE)
2320 break;
2321 if (isa<llvm::BitCastInst>(&*II) && (CastAddr == &*II))
2322 continue;
David Majnemerdc012fa2015-04-22 21:38:15 +00002323 }
2324 }
2325 I = &*II;
2326 break;
2327 }
2328
2329 llvm::StoreInst *store = dyn_cast<llvm::StoreInst>(I);
Craig Topper8a13c412014-05-21 05:09:00 +00002330 if (!store) return nullptr;
2331 if (store->getPointerOperand() != CGF.ReturnValue) return nullptr;
John McCall6e1c0122012-01-29 02:35:02 +00002332 assert(!store->isAtomic() && !store->isVolatile()); // see below
2333 return store;
2334 }
2335
2336 llvm::StoreInst *store =
Chandler Carruth4d01fff2014-03-09 03:16:50 +00002337 dyn_cast<llvm::StoreInst>(CGF.ReturnValue->user_back());
Craig Topper8a13c412014-05-21 05:09:00 +00002338 if (!store) return nullptr;
John McCall6e1c0122012-01-29 02:35:02 +00002339
2340 // These aren't actually possible for non-coerced returns, and we
2341 // only care about non-coerced returns on this code path.
2342 assert(!store->isAtomic() && !store->isVolatile());
2343
2344  // Now do a quick-and-dirty dominance check: just walk up the
2345  // single-predecessor chain from the current insertion point.
2346 llvm::BasicBlock *StoreBB = store->getParent();
2347 llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock();
2348 while (IP != StoreBB) {
2349 if (!(IP = IP->getSinglePredecessor()))
Craig Topper8a13c412014-05-21 05:09:00 +00002350 return nullptr;
John McCall6e1c0122012-01-29 02:35:02 +00002351 }
2352
2353 // Okay, the store's basic block dominates the insertion point; we
2354 // can do our thing.
2355 return store;
2356}
2357
Adrian Prantl3be10542013-05-02 17:30:20 +00002358void CodeGenFunction::EmitFunctionEpilog(const CGFunctionInfo &FI,
Nick Lewycky2d84e842013-10-02 02:29:49 +00002359 bool EmitRetDbgLoc,
2360 SourceLocation EndLoc) {
Hans Wennborgd71907d2014-09-04 22:16:33 +00002361 if (CurCodeDecl && CurCodeDecl->hasAttr<NakedAttr>()) {
2362 // Naked functions don't have epilogues.
2363 Builder.CreateUnreachable();
2364 return;
2365 }
2366
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002367 // Functions with no result always return void.
Craig Topper8a13c412014-05-21 05:09:00 +00002368 if (!ReturnValue) {
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002369 Builder.CreateRetVoid();
Chris Lattner726b3d02010-06-26 23:13:19 +00002370 return;
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002371 }
Daniel Dunbar6696e222010-06-30 21:27:58 +00002372
Dan Gohman481e40c2010-07-20 20:13:52 +00002373 llvm::DebugLoc RetDbgLoc;
Craig Topper8a13c412014-05-21 05:09:00 +00002374 llvm::Value *RV = nullptr;
Chris Lattner726b3d02010-06-26 23:13:19 +00002375 QualType RetTy = FI.getReturnType();
2376 const ABIArgInfo &RetAI = FI.getReturnInfo();
2377
2378 switch (RetAI.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002379 case ABIArgInfo::InAlloca:
Reid Klecknerfab1e892014-02-25 00:59:14 +00002380    // Aggregates get evaluated directly into the destination. Sometimes we
2381 // need to return the sret value in a register, though.
2382 assert(hasAggregateEvaluationKind(RetTy));
2383 if (RetAI.getInAllocaSRet()) {
2384 llvm::Function::arg_iterator EI = CurFn->arg_end();
2385 --EI;
2386 llvm::Value *ArgStruct = EI;
David Blaikie2e804282015-04-05 22:47:07 +00002387 llvm::Value *SRet = Builder.CreateStructGEP(
2388 nullptr, ArgStruct, RetAI.getInAllocaFieldIndex());
Reid Klecknerfab1e892014-02-25 00:59:14 +00002389 RV = Builder.CreateLoad(SRet, "sret");
2390 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002391 break;
2392
Daniel Dunbar03816342010-08-21 02:24:36 +00002393 case ABIArgInfo::Indirect: {
Reid Kleckner37abaca2014-05-09 22:46:15 +00002394 auto AI = CurFn->arg_begin();
2395 if (RetAI.isSRetAfterThis())
2396 ++AI;
John McCall47fb9502013-03-07 21:37:08 +00002397 switch (getEvaluationKind(RetTy)) {
2398 case TEK_Complex: {
2399 ComplexPairTy RT =
Nick Lewycky2d84e842013-10-02 02:29:49 +00002400 EmitLoadOfComplex(MakeNaturalAlignAddrLValue(ReturnValue, RetTy),
2401 EndLoc);
Reid Kleckner37abaca2014-05-09 22:46:15 +00002402 EmitStoreOfComplex(RT, MakeNaturalAlignAddrLValue(AI, RetTy),
John McCall47fb9502013-03-07 21:37:08 +00002403 /*isInit*/ true);
2404 break;
2405 }
2406 case TEK_Aggregate:
Chris Lattner726b3d02010-06-26 23:13:19 +00002407      // Do nothing; aggregates get evaluated directly into the destination.
John McCall47fb9502013-03-07 21:37:08 +00002408 break;
2409 case TEK_Scalar:
2410 EmitStoreOfScalar(Builder.CreateLoad(ReturnValue),
Reid Kleckner37abaca2014-05-09 22:46:15 +00002411 MakeNaturalAlignAddrLValue(AI, RetTy),
John McCall47fb9502013-03-07 21:37:08 +00002412 /*isInit*/ true);
2413 break;
Chris Lattner726b3d02010-06-26 23:13:19 +00002414 }
2415 break;
Daniel Dunbar03816342010-08-21 02:24:36 +00002416 }
Chris Lattner726b3d02010-06-26 23:13:19 +00002417
2418 case ABIArgInfo::Extend:
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002419 case ABIArgInfo::Direct:
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002420 if (RetAI.getCoerceToType() == ConvertType(RetTy) &&
2421 RetAI.getDirectOffset() == 0) {
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002422      // The internal return value temp will always have pointer-to-return-type
2423      // type, so just do a load.
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002424
John McCall6e1c0122012-01-29 02:35:02 +00002425 // If there is a dominating store to ReturnValue, we can elide
2426 // the load, zap the store, and usually zap the alloca.
David Majnemerdc012fa2015-04-22 21:38:15 +00002427 if (llvm::StoreInst *SI =
2428 findDominatingStoreToReturnValue(*this)) {
Adrian Prantl4c9a38a2013-05-30 18:12:23 +00002429 // Reuse the debug location from the store unless there is
2430 // cleanup code to be emitted between the store and return
2431 // instruction.
2432 if (EmitRetDbgLoc && !AutoreleaseResult)
Adrian Prantl3be10542013-05-02 17:30:20 +00002433 RetDbgLoc = SI->getDebugLoc();
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002434 // Get the stored value and nuke the now-dead store.
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002435 RV = SI->getValueOperand();
2436 SI->eraseFromParent();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002437
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002438 // If that was the only use of the return value, nuke it as well now.
2439 if (ReturnValue->use_empty() && isa<llvm::AllocaInst>(ReturnValue)) {
2440 cast<llvm::AllocaInst>(ReturnValue)->eraseFromParent();
Craig Topper8a13c412014-05-21 05:09:00 +00002441 ReturnValue = nullptr;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002442 }
John McCall6e1c0122012-01-29 02:35:02 +00002443
2444 // Otherwise, we have to do a simple load.
2445 } else {
2446 RV = Builder.CreateLoad(ReturnValue);
Chris Lattner3fcc7902010-06-27 01:06:27 +00002447 }
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002448 } else {
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002449 llvm::Value *V = ReturnValue;
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002450 CharUnits Align = getContext().getTypeAlignInChars(RetTy);
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002451 // If the value is offset in memory, apply the offset now.
2452 if (unsigned Offs = RetAI.getDirectOffset()) {
2453 V = Builder.CreateBitCast(V, Builder.getInt8PtrTy());
David Blaikie5e259a82015-04-03 22:54:16 +00002454 V = Builder.CreateConstGEP1_32(Builder.getInt8Ty(), V, Offs);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002455 V = Builder.CreateBitCast(V,
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002456 llvm::PointerType::getUnqual(RetAI.getCoerceToType()));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002457 Align = Align.alignmentAtOffset(CharUnits::fromQuantity(Offs));
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002458 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002459
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00002460 RV = CreateCoercedLoad(V, RetAI.getCoerceToType(), Align, *this);
Chris Lattner3fcc7902010-06-27 01:06:27 +00002461 }
John McCall31168b02011-06-15 23:02:42 +00002462
2463 // In ARC, end functions that return a retainable type with a call
2464 // to objc_autoreleaseReturnValue.
2465 if (AutoreleaseResult) {
David Blaikiebbafb8a2012-03-11 07:00:24 +00002466 assert(getLangOpts().ObjCAutoRefCount &&
John McCall31168b02011-06-15 23:02:42 +00002467 !FI.isReturnsRetained() &&
2468 RetTy->isObjCRetainableType());
2469 RV = emitAutoreleaseOfResult(*this, RV);
2470 }
2471
Chris Lattner726b3d02010-06-26 23:13:19 +00002472 break;
Chris Lattner726b3d02010-06-26 23:13:19 +00002473
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002474 case ABIArgInfo::Ignore:
Chris Lattner726b3d02010-06-26 23:13:19 +00002475 break;
2476
2477 case ABIArgInfo::Expand:
David Blaikie83d382b2011-09-23 05:06:16 +00002478 llvm_unreachable("Invalid ABI kind for return argument");
Chris Lattner726b3d02010-06-26 23:13:19 +00002479 }
2480
Alexey Samsonovde443c52014-08-13 00:26:40 +00002481 llvm::Instruction *Ret;
2482 if (RV) {
Alexey Samsonovedf99a92014-11-07 22:29:38 +00002483 if (SanOpts.has(SanitizerKind::ReturnsNonnullAttribute)) {
Alexey Samsonov90452df2014-09-08 20:17:19 +00002484 if (auto RetNNAttr = CurGD.getDecl()->getAttr<ReturnsNonNullAttr>()) {
2485 SanitizerScope SanScope(this);
2486 llvm::Value *Cond = Builder.CreateICmpNE(
2487 RV, llvm::Constant::getNullValue(RV->getType()));
2488 llvm::Constant *StaticData[] = {
2489 EmitCheckSourceLocation(EndLoc),
2490 EmitCheckSourceLocation(RetNNAttr->getLocation()),
2491 };
Alexey Samsonove396bfc2014-11-11 22:03:54 +00002492 EmitCheck(std::make_pair(Cond, SanitizerKind::ReturnsNonnullAttribute),
2493 "nonnull_return", StaticData, None);
Alexey Samsonov90452df2014-09-08 20:17:19 +00002494 }
Alexey Samsonovde443c52014-08-13 00:26:40 +00002495 }
2496 Ret = Builder.CreateRet(RV);
2497 } else {
2498 Ret = Builder.CreateRetVoid();
2499 }
2500
Duncan P. N. Exon Smith2809cc72015-03-30 20:01:41 +00002501 if (RetDbgLoc)
Benjamin Kramer03278662015-02-07 13:15:54 +00002502 Ret->setDebugLoc(std::move(RetDbgLoc));
Daniel Dunbar613855c2008-09-09 23:27:19 +00002503}
2504
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002505static bool isInAllocaArgument(CGCXXABI &ABI, QualType type) {
2506 const CXXRecordDecl *RD = type->getAsCXXRecordDecl();
2507 return RD && ABI.getRecordArgABI(RD) == CGCXXABI::RAA_DirectInMemory;
2508}
2509
2510static AggValueSlot createPlaceholderSlot(CodeGenFunction &CGF, QualType Ty) {
2511 // FIXME: Generate IR in one pass, rather than going back and fixing up these
2512 // placeholders.
2513 llvm::Type *IRTy = CGF.ConvertTypeForMem(Ty);
2514 llvm::Value *Placeholder =
2515 llvm::UndefValue::get(IRTy->getPointerTo()->getPointerTo());
2516 Placeholder = CGF.Builder.CreateLoad(Placeholder);
2517 return AggValueSlot::forAddr(Placeholder, CharUnits::Zero(),
2518 Ty.getQualifiers(),
2519 AggValueSlot::IsNotDestructed,
2520 AggValueSlot::DoesNotNeedGCBarriers,
2521 AggValueSlot::IsNotAliased);
2522}
2523
John McCall32ea9692011-03-11 20:59:21 +00002524void CodeGenFunction::EmitDelegateCallArg(CallArgList &args,
Nick Lewycky2d84e842013-10-02 02:29:49 +00002525 const VarDecl *param,
2526 SourceLocation loc) {
John McCall23f66262010-05-26 22:34:26 +00002527 // StartFunction converted the ABI-lowered parameter(s) into a
2528 // local alloca. We need to turn that into an r-value suitable
2529 // for EmitCall.
John McCall32ea9692011-03-11 20:59:21 +00002530 llvm::Value *local = GetAddrOfLocalVar(param);
John McCall23f66262010-05-26 22:34:26 +00002531
John McCall32ea9692011-03-11 20:59:21 +00002532 QualType type = param->getType();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002533
John McCall23f66262010-05-26 22:34:26 +00002534 // For the most part, we just need to load the alloca, except:
2535 // 1) aggregate r-values are actually pointers to temporaries, and
John McCall47fb9502013-03-07 21:37:08 +00002536 // 2) references to non-scalars are pointers directly to the aggregate.
2537 // I don't know why references to scalars are different here.
John McCall32ea9692011-03-11 20:59:21 +00002538 if (const ReferenceType *ref = type->getAs<ReferenceType>()) {
John McCall47fb9502013-03-07 21:37:08 +00002539 if (!hasScalarEvaluationKind(ref->getPointeeType()))
John McCall32ea9692011-03-11 20:59:21 +00002540 return args.add(RValue::getAggregate(local), type);
John McCall23f66262010-05-26 22:34:26 +00002541
2542 // Locals which are references to scalars are represented
2543 // with allocas holding the pointer.
John McCall32ea9692011-03-11 20:59:21 +00002544 return args.add(RValue::get(Builder.CreateLoad(local)), type);
John McCall23f66262010-05-26 22:34:26 +00002545 }
2546
Reid Klecknerab2090d2014-07-26 01:34:32 +00002547 assert(!isInAllocaArgument(CGM.getCXXABI(), type) &&
2548 "cannot emit delegate call arguments for inalloca arguments!");
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002549
Nick Lewycky2d84e842013-10-02 02:29:49 +00002550 args.add(convertTempToRValue(local, type, loc), type);
John McCall23f66262010-05-26 22:34:26 +00002551}
2552
John McCall31168b02011-06-15 23:02:42 +00002553static bool isProvablyNull(llvm::Value *addr) {
2554 return isa<llvm::ConstantPointerNull>(addr);
2555}
2556
2557static bool isProvablyNonNull(llvm::Value *addr) {
2558 return isa<llvm::AllocaInst>(addr);
2559}
2560
2561/// Emit the actual writing-back of a writeback.
2562static void emitWriteback(CodeGenFunction &CGF,
2563 const CallArgList::Writeback &writeback) {
John McCalleff18842013-03-23 02:35:54 +00002564 const LValue &srcLV = writeback.Source;
2565 llvm::Value *srcAddr = srcLV.getAddress();
John McCall31168b02011-06-15 23:02:42 +00002566 assert(!isProvablyNull(srcAddr) &&
2567 "shouldn't have writeback for provably null argument");
2568
Craig Topper8a13c412014-05-21 05:09:00 +00002569 llvm::BasicBlock *contBB = nullptr;
John McCall31168b02011-06-15 23:02:42 +00002570
2571 // If the argument wasn't provably non-null, we need to null check
2572 // before doing the store.
2573 bool provablyNonNull = isProvablyNonNull(srcAddr);
2574 if (!provablyNonNull) {
2575 llvm::BasicBlock *writebackBB = CGF.createBasicBlock("icr.writeback");
2576 contBB = CGF.createBasicBlock("icr.done");
2577
2578 llvm::Value *isNull = CGF.Builder.CreateIsNull(srcAddr, "icr.isnull");
2579 CGF.Builder.CreateCondBr(isNull, contBB, writebackBB);
2580 CGF.EmitBlock(writebackBB);
2581 }
2582
2583 // Load the value to writeback.
2584 llvm::Value *value = CGF.Builder.CreateLoad(writeback.Temporary);
2585
2586 // Cast it back, in case we're writing an id to a Foo* or something.
2587 value = CGF.Builder.CreateBitCast(value,
2588 cast<llvm::PointerType>(srcAddr->getType())->getElementType(),
2589 "icr.writeback-cast");
2590
2591 // Perform the writeback.
John McCalleff18842013-03-23 02:35:54 +00002592
2593 // If we have a "to use" value, it's something we need to emit a use
2594 // of. This has to be carefully threaded in: if it's done after the
2595 // release it's potentially undefined behavior (and the optimizer
2596 // will ignore it), and if it happens before the retain then the
2597 // optimizer could move the release there.
2598 if (writeback.ToUse) {
2599 assert(srcLV.getObjCLifetime() == Qualifiers::OCL_Strong);
2600
2601 // Retain the new value. No need to block-copy here: the block's
2602 // being passed up the stack.
2603 value = CGF.EmitARCRetainNonBlock(value);
2604
2605 // Emit the intrinsic use here.
2606 CGF.EmitARCIntrinsicUse(writeback.ToUse);
2607
2608 // Load the old value (primitively).
Nick Lewycky2d84e842013-10-02 02:29:49 +00002609 llvm::Value *oldValue = CGF.EmitLoadOfScalar(srcLV, SourceLocation());
John McCalleff18842013-03-23 02:35:54 +00002610
2611 // Put the new value in place (primitively).
2612 CGF.EmitStoreOfScalar(value, srcLV, /*init*/ false);
2613
2614 // Release the old value.
2615 CGF.EmitARCRelease(oldValue, srcLV.isARCPreciseLifetime());
2616
2617 // Otherwise, we can just do a normal lvalue store.
2618 } else {
2619 CGF.EmitStoreThroughLValue(RValue::get(value), srcLV);
2620 }
John McCall31168b02011-06-15 23:02:42 +00002621
2622 // Jump to the continuation block.
2623 if (!provablyNonNull)
2624 CGF.EmitBlock(contBB);
2625}
2626
2627static void emitWritebacks(CodeGenFunction &CGF,
2628 const CallArgList &args) {
Aaron Ballman36a7fa82014-03-17 17:22:27 +00002629 for (const auto &I : args.writebacks())
2630 emitWriteback(CGF, I);
John McCall31168b02011-06-15 23:02:42 +00002631}
2632
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002633static void deactivateArgCleanupsBeforeCall(CodeGenFunction &CGF,
2634 const CallArgList &CallArgs) {
Reid Kleckner739756c2013-12-04 19:23:12 +00002635 assert(CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee());
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002636 ArrayRef<CallArgList::CallArgCleanup> Cleanups =
2637 CallArgs.getCleanupsToDeactivate();
2638 // Iterate in reverse to increase the likelihood of popping the cleanup.
Pete Cooper57d3f142015-07-30 17:22:52 +00002639 for (const auto &I : llvm::reverse(Cleanups)) {
2640 CGF.DeactivateCleanupBlock(I.Cleanup, I.IsActiveIP);
2641 I.IsActiveIP->eraseFromParent();
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002642 }
2643}
2644
John McCalleff18842013-03-23 02:35:54 +00002645static const Expr *maybeGetUnaryAddrOfOperand(const Expr *E) {
2646 if (const UnaryOperator *uop = dyn_cast<UnaryOperator>(E->IgnoreParens()))
2647 if (uop->getOpcode() == UO_AddrOf)
2648 return uop->getSubExpr();
Craig Topper8a13c412014-05-21 05:09:00 +00002649 return nullptr;
John McCalleff18842013-03-23 02:35:54 +00002650}
2651
John McCall31168b02011-06-15 23:02:42 +00002652/// Emit an argument that's being passed call-by-writeback. That is,
2653/// we are passing the address of a temporary that is written back after the call.
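/// A typical source-level trigger (illustrative): under ARC, passing &err,
/// where err is a __strong NSError *, to a parameter declared as
/// NSError * __autoreleasing *. The callee receives the address of a
/// temporary, and the temporary's value is copied back into 'err' afterwards.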
2654static void emitWritebackArg(CodeGenFunction &CGF, CallArgList &args,
2655 const ObjCIndirectCopyRestoreExpr *CRE) {
John McCalleff18842013-03-23 02:35:54 +00002656 LValue srcLV;
2657
2658 // Make an optimistic effort to emit the address as an l-value.
Eric Christopher2c4555a2015-06-19 01:52:53 +00002659 // This can fail if the argument expression is more complicated.
John McCalleff18842013-03-23 02:35:54 +00002660 if (const Expr *lvExpr = maybeGetUnaryAddrOfOperand(CRE->getSubExpr())) {
2661 srcLV = CGF.EmitLValue(lvExpr);
2662
2663 // Otherwise, just emit it as a scalar.
2664 } else {
2665 llvm::Value *srcAddr = CGF.EmitScalarExpr(CRE->getSubExpr());
2666
2667 QualType srcAddrType =
2668 CRE->getSubExpr()->getType()->castAs<PointerType>()->getPointeeType();
2669 srcLV = CGF.MakeNaturalAlignAddrLValue(srcAddr, srcAddrType);
2670 }
2671 llvm::Value *srcAddr = srcLV.getAddress();
John McCall31168b02011-06-15 23:02:42 +00002672
2673 // The dest and src types don't necessarily match in LLVM terms
2674 // because of the crazy ObjC compatibility rules.
2675
Chris Lattner2192fe52011-07-18 04:24:23 +00002676 llvm::PointerType *destType =
John McCall31168b02011-06-15 23:02:42 +00002677 cast<llvm::PointerType>(CGF.ConvertType(CRE->getType()));
2678
2679 // If the address is a constant null, just pass the appropriate null.
2680 if (isProvablyNull(srcAddr)) {
2681 args.add(RValue::get(llvm::ConstantPointerNull::get(destType)),
2682 CRE->getType());
2683 return;
2684 }
2685
John McCall31168b02011-06-15 23:02:42 +00002686 // Create the temporary.
2687 llvm::Value *temp = CGF.CreateTempAlloca(destType->getElementType(),
2688 "icr.temp");
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00002689 // Loading an l-value can introduce a cleanup if the l-value is __weak,
2690 // and that cleanup will be conditional if we can't prove that the l-value
2691 // isn't null, so we need to register a dominating point so that the cleanups
2692 // system will make valid IR.
2693 CodeGenFunction::ConditionalEvaluation condEval(CGF);
2694
John McCall31168b02011-06-15 23:02:42 +00002695 // Zero-initialize it if we're not doing a copy-initialization.
2696 bool shouldCopy = CRE->shouldCopy();
2697 if (!shouldCopy) {
2698 llvm::Value *null =
2699 llvm::ConstantPointerNull::get(
2700 cast<llvm::PointerType>(destType->getElementType()));
2701 CGF.Builder.CreateStore(null, temp);
2702 }
Craig Topper8a13c412014-05-21 05:09:00 +00002703
2704 llvm::BasicBlock *contBB = nullptr;
2705 llvm::BasicBlock *originBB = nullptr;
John McCall31168b02011-06-15 23:02:42 +00002706
2707 // If the address is *not* known to be non-null, we need to switch.
2708 llvm::Value *finalArgument;
2709
2710 bool provablyNonNull = isProvablyNonNull(srcAddr);
2711 if (provablyNonNull) {
2712 finalArgument = temp;
2713 } else {
2714 llvm::Value *isNull = CGF.Builder.CreateIsNull(srcAddr, "icr.isnull");
2715
2716 finalArgument = CGF.Builder.CreateSelect(isNull,
2717 llvm::ConstantPointerNull::get(destType),
2718 temp, "icr.argument");
2719
2720 // If we need to copy, then the load has to be conditional, which
2721 // means we need control flow.
2722 if (shouldCopy) {
John McCalleff18842013-03-23 02:35:54 +00002723 originBB = CGF.Builder.GetInsertBlock();
John McCall31168b02011-06-15 23:02:42 +00002724 contBB = CGF.createBasicBlock("icr.cont");
2725 llvm::BasicBlock *copyBB = CGF.createBasicBlock("icr.copy");
2726 CGF.Builder.CreateCondBr(isNull, contBB, copyBB);
2727 CGF.EmitBlock(copyBB);
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00002728 condEval.begin(CGF);
John McCall31168b02011-06-15 23:02:42 +00002729 }
2730 }
2731
Craig Topper8a13c412014-05-21 05:09:00 +00002732 llvm::Value *valueToUse = nullptr;
John McCalleff18842013-03-23 02:35:54 +00002733
John McCall31168b02011-06-15 23:02:42 +00002734 // Perform a copy if necessary.
2735 if (shouldCopy) {
Nick Lewycky2d84e842013-10-02 02:29:49 +00002736 RValue srcRV = CGF.EmitLoadOfLValue(srcLV, SourceLocation());
John McCall31168b02011-06-15 23:02:42 +00002737 assert(srcRV.isScalar());
2738
2739 llvm::Value *src = srcRV.getScalarVal();
2740 src = CGF.Builder.CreateBitCast(src, destType->getElementType(),
2741 "icr.cast");
2742
2743 // Use an ordinary store, not a store-to-lvalue.
2744 CGF.Builder.CreateStore(src, temp);
John McCalleff18842013-03-23 02:35:54 +00002745
2746 // If optimization is enabled, and the value was held in a
2747 // __strong variable, we need to tell the optimizer that this
2748 // value has to stay alive until we're doing the store back.
2749 // This is because the temporary is effectively unretained,
2750 // and so otherwise we can violate the high-level semantics.
2751 if (CGF.CGM.getCodeGenOpts().OptimizationLevel != 0 &&
2752 srcLV.getObjCLifetime() == Qualifiers::OCL_Strong) {
2753 valueToUse = src;
2754 }
John McCall31168b02011-06-15 23:02:42 +00002755 }
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00002756
John McCall31168b02011-06-15 23:02:42 +00002757 // Finish the control flow if we needed it.
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00002758 if (shouldCopy && !provablyNonNull) {
John McCalleff18842013-03-23 02:35:54 +00002759 llvm::BasicBlock *copyBB = CGF.Builder.GetInsertBlock();
John McCall31168b02011-06-15 23:02:42 +00002760 CGF.EmitBlock(contBB);
John McCalleff18842013-03-23 02:35:54 +00002761
2762 // Make a phi for the value to intrinsically use.
2763 if (valueToUse) {
2764 llvm::PHINode *phiToUse = CGF.Builder.CreatePHI(valueToUse->getType(), 2,
2765 "icr.to-use");
2766 phiToUse->addIncoming(valueToUse, copyBB);
2767 phiToUse->addIncoming(llvm::UndefValue::get(valueToUse->getType()),
2768 originBB);
2769 valueToUse = phiToUse;
2770 }
2771
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00002772 condEval.end(CGF);
2773 }
John McCall31168b02011-06-15 23:02:42 +00002774
John McCalleff18842013-03-23 02:35:54 +00002775 args.addWriteback(srcLV, temp, valueToUse);
John McCall31168b02011-06-15 23:02:42 +00002776 args.add(RValue::get(finalArgument), CRE->getType());
2777}
2778
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002779void CallArgList::allocateArgumentMemory(CodeGenFunction &CGF) {
2780 assert(!StackBase && !StackCleanup.isValid());
2781
2782 // Save the stack.
2783 llvm::Function *F = CGF.CGM.getIntrinsic(llvm::Intrinsic::stacksave);
David Blaikie43f9bb72015-05-18 22:14:03 +00002784 StackBase = CGF.Builder.CreateCall(F, {}, "inalloca.save");
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002785
2786 // Control gets really tied up in landing pads, so we have to spill the
2787 // stacksave to an alloca to avoid violating SSA form.
2788 // TODO: This is dead if we never emit the cleanup. We should create the
2789 // alloca and store lazily on the first cleanup emission.
2790 StackBaseMem = CGF.CreateTempAlloca(CGF.Int8PtrTy, "inalloca.spmem");
2791 CGF.Builder.CreateStore(StackBase, StackBaseMem);
Nico Weber8cdb3f92015-08-25 18:43:32 +00002792 CGF.pushStackRestore(EHCleanup, StackBaseMem);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002793 StackCleanup = CGF.EHStack.getInnermostEHScope();
2794 assert(StackCleanup.isValid());
2795}
2796
Nico Weber8cdb3f92015-08-25 18:43:32 +00002797void CallArgList::freeArgumentMemory(CodeGenFunction &CGF) const {
2798 if (StackBase) {
2799 CGF.DeactivateCleanupBlock(StackCleanup, StackBase);
2800 llvm::Value *F = CGF.CGM.getIntrinsic(llvm::Intrinsic::stackrestore);
2801 // We could load StackBase from StackBaseMem, but in the non-exceptional
2802 // case we can skip it.
2803 CGF.Builder.CreateCall(F, StackBase);
2804 }
2805}
2806
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002807void CodeGenFunction::EmitNonNullArgCheck(RValue RV, QualType ArgType,
2808 SourceLocation ArgLoc,
2809 const FunctionDecl *FD,
2810 unsigned ParmNum) {
2811 if (!SanOpts.has(SanitizerKind::NonnullAttribute) || !FD)
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002812 return;
2813 auto PVD = ParmNum < FD->getNumParams() ? FD->getParamDecl(ParmNum) : nullptr;
2814 unsigned ArgNo = PVD ? PVD->getFunctionScopeIndex() : ParmNum;
2815 auto NNAttr = getNonNullAttr(FD, PVD, ArgType, ArgNo);
2816 if (!NNAttr)
2817 return;
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002818 SanitizerScope SanScope(this);
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002819 assert(RV.isScalar());
2820 llvm::Value *V = RV.getScalarVal();
2821 llvm::Value *Cond =
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002822 Builder.CreateICmpNE(V, llvm::Constant::getNullValue(V->getType()));
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002823 llvm::Constant *StaticData[] = {
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002824 EmitCheckSourceLocation(ArgLoc),
2825 EmitCheckSourceLocation(NNAttr->getLocation()),
2826 llvm::ConstantInt::get(Int32Ty, ArgNo + 1),
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002827 };
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002828 EmitCheck(std::make_pair(Cond, SanitizerKind::NonnullAttribute),
Alexey Samsonove396bfc2014-11-11 22:03:54 +00002829 "nonnull_arg", StaticData, None);
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002830}
2831
David Blaikief05779e2015-07-21 18:37:18 +00002832void CodeGenFunction::EmitCallArgs(
2833 CallArgList &Args, ArrayRef<QualType> ArgTypes,
2834 llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
2835 const FunctionDecl *CalleeDecl, unsigned ParamsToSkip) {
2836 assert((int)ArgTypes.size() == (ArgRange.end() - ArgRange.begin()));
Reid Kleckner739756c2013-12-04 19:23:12 +00002837 // We *have* to evaluate arguments from right to left in the MS C++ ABI,
2838 // because arguments are destroyed left to right in the callee.
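  // Sketch of the motivation (illustrative): for f(A(), B()) under this ABI,
  // B's argument is constructed before A's, so the callee, which destroys
  // its parameters left to right, tears them down in reverse order of
  // construction.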
2839 if (CGM.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002840 // Insert a stack save if we're going to need any inalloca args.
2841 bool HasInAllocaArgs = false;
2842 for (ArrayRef<QualType>::iterator I = ArgTypes.begin(), E = ArgTypes.end();
2843 I != E && !HasInAllocaArgs; ++I)
2844 HasInAllocaArgs = isInAllocaArgument(CGM.getCXXABI(), *I);
2845 if (HasInAllocaArgs) {
2846 assert(getTarget().getTriple().getArch() == llvm::Triple::x86);
2847 Args.allocateArgumentMemory(*this);
2848 }
2849
2850 // Evaluate each argument.
Reid Kleckner739756c2013-12-04 19:23:12 +00002851 size_t CallArgsStart = Args.size();
2852 for (int I = ArgTypes.size() - 1; I >= 0; --I) {
David Blaikief05779e2015-07-21 18:37:18 +00002853 CallExpr::const_arg_iterator Arg = ArgRange.begin() + I;
Reid Kleckner739756c2013-12-04 19:23:12 +00002854 EmitCallArg(Args, *Arg, ArgTypes[I]);
Benjamin Kramerf48ee442015-07-18 14:35:53 +00002855 EmitNonNullArgCheck(Args.back().RV, ArgTypes[I], (*Arg)->getExprLoc(),
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002856 CalleeDecl, ParamsToSkip + I);
Reid Kleckner739756c2013-12-04 19:23:12 +00002857 }
2858
2859 // Un-reverse the arguments we just evaluated so they match up with the LLVM
2860 // IR function.
2861 std::reverse(Args.begin() + CallArgsStart, Args.end());
2862 return;
2863 }
2864
2865 for (unsigned I = 0, E = ArgTypes.size(); I != E; ++I) {
David Blaikief05779e2015-07-21 18:37:18 +00002866 CallExpr::const_arg_iterator Arg = ArgRange.begin() + I;
2867 assert(Arg != ArgRange.end());
Reid Kleckner739756c2013-12-04 19:23:12 +00002868 EmitCallArg(Args, *Arg, ArgTypes[I]);
Benjamin Kramerf48ee442015-07-18 14:35:53 +00002869 EmitNonNullArgCheck(Args.back().RV, ArgTypes[I], (*Arg)->getExprLoc(),
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002870 CalleeDecl, ParamsToSkip + I);
Reid Kleckner739756c2013-12-04 19:23:12 +00002871 }
2872}
2873
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002874namespace {
2875
David Blaikie7e70d682015-08-18 22:40:54 +00002876struct DestroyUnpassedArg final : EHScopeStack::Cleanup {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002877 DestroyUnpassedArg(llvm::Value *Addr, QualType Ty)
2878 : Addr(Addr), Ty(Ty) {}
2879
2880 llvm::Value *Addr;
2881 QualType Ty;
2882
Craig Topper4f12f102014-03-12 06:41:41 +00002883 void Emit(CodeGenFunction &CGF, Flags flags) override {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002884 const CXXDestructorDecl *Dtor = Ty->getAsCXXRecordDecl()->getDestructor();
2885 assert(!Dtor->isTrivial());
2886 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*for vbase*/ false,
2887 /*Delegating=*/false, Addr);
2888 }
2889};
2890
Alexander Kornienkoab9db512015-06-22 23:07:51 +00002891}
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002892
David Blaikie38b25912015-02-09 19:13:51 +00002893struct DisableDebugLocationUpdates {
2894 CodeGenFunction &CGF;
2895 bool disabledDebugInfo;
2896 DisableDebugLocationUpdates(CodeGenFunction &CGF, const Expr *E) : CGF(CGF) {
2897 if ((disabledDebugInfo = isa<CXXDefaultArgExpr>(E) && CGF.getDebugInfo()))
2898 CGF.disableDebugInfo();
2899 }
2900 ~DisableDebugLocationUpdates() {
2901 if (disabledDebugInfo)
2902 CGF.enableDebugInfo();
2903 }
2904};
2905
John McCall32ea9692011-03-11 20:59:21 +00002906void CodeGenFunction::EmitCallArg(CallArgList &args, const Expr *E,
2907 QualType type) {
David Blaikie38b25912015-02-09 19:13:51 +00002908 DisableDebugLocationUpdates Dis(*this, E);
John McCall31168b02011-06-15 23:02:42 +00002909 if (const ObjCIndirectCopyRestoreExpr *CRE
2910 = dyn_cast<ObjCIndirectCopyRestoreExpr>(E)) {
Richard Smith9c6890a2012-11-01 22:30:59 +00002911 assert(getLangOpts().ObjCAutoRefCount);
John McCall31168b02011-06-15 23:02:42 +00002912 assert(getContext().hasSameType(E->getType(), type));
2913 return emitWritebackArg(*this, args, CRE);
2914 }
2915
John McCall0a76c0c2011-08-26 18:42:59 +00002916 assert(type->isReferenceType() == E->isGLValue() &&
2917 "reference binding to unmaterialized r-value!");
2918
John McCall17054bd62011-08-26 21:08:13 +00002919 if (E->isGLValue()) {
2920 assert(E->getObjectKind() == OK_Ordinary);
Richard Smitha1c9d4d2013-06-12 23:38:09 +00002921 return args.add(EmitReferenceBindingToExpr(E), type);
John McCall17054bd62011-08-26 21:08:13 +00002922 }
Mike Stump11289f42009-09-09 15:08:12 +00002923
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002924 bool HasAggregateEvalKind = hasAggregateEvaluationKind(type);
2925
2926 // In the Microsoft C++ ABI, aggregate arguments are destructed by the callee.
2927 // However, we still have to push an EH-only cleanup in case we unwind before
2928 // we make it to the call.
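  // Illustrative scenario: in f(A(), mayThrow()), A's temporary already sits
  // in its argument slot when mayThrow() unwinds; since the callee never
  // runs, the EH-only cleanup pushed below is what destroys that A.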
Reid Klecknerac640602014-05-01 03:07:18 +00002929 if (HasAggregateEvalKind &&
2930 CGM.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
2931 // If we're using inalloca, use the argument memory. Otherwise, use a
Reid Klecknere39ee212014-05-03 00:33:28 +00002932 // temporary.
Reid Klecknerac640602014-05-01 03:07:18 +00002933 AggValueSlot Slot;
2934 if (args.isUsingInAlloca())
2935 Slot = createPlaceholderSlot(*this, type);
2936 else
2937 Slot = CreateAggTemp(type, "agg.tmp");
Reid Klecknere39ee212014-05-03 00:33:28 +00002938
2939 const CXXRecordDecl *RD = type->getAsCXXRecordDecl();
2940 bool DestroyedInCallee =
2941 RD && RD->hasNonTrivialDestructor() &&
2942 CGM.getCXXABI().getRecordArgABI(RD) != CGCXXABI::RAA_Default;
2943 if (DestroyedInCallee)
2944 Slot.setExternallyDestructed();
2945
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002946 EmitAggExpr(E, Slot);
2947 RValue RV = Slot.asRValue();
2948 args.add(RV, type);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002949
Reid Klecknere39ee212014-05-03 00:33:28 +00002950 if (DestroyedInCallee) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002951 // Create a no-op GEP between the placeholder and the cleanup so we can
2952 // RAUW it successfully. It also serves as a marker of the first
2953 // instruction where the cleanup is active.
2954 pushFullExprCleanup<DestroyUnpassedArg>(EHCleanup, Slot.getAddr(), type);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002955 // This unreachable is a temporary marker which will be removed later.
2956 llvm::Instruction *IsActive = Builder.CreateUnreachable();
2957 args.addArgCleanupDeactivation(EHStack.getInnermostEHScope(), IsActive);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002958 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002959 return;
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00002960 }
2961
2962 if (HasAggregateEvalKind && isa<ImplicitCastExpr>(E) &&
Eli Friedmandf968192011-05-26 00:10:27 +00002963 cast<CastExpr>(E)->getCastKind() == CK_LValueToRValue) {
2964 LValue L = EmitLValue(cast<CastExpr>(E)->getSubExpr());
2965 assert(L.isSimple());
Eli Friedman61f615a2013-06-11 01:08:22 +00002966 if (L.getAlignment() >= getContext().getTypeAlignInChars(type)) {
2967 args.add(L.asAggregateRValue(), type, /*NeedsCopy*/true);
2968 } else {
2969 // We can't represent a misaligned lvalue in the CallArgList, so copy
2970 // to an aligned temporary now.
2971 llvm::Value *tmp = CreateMemTemp(type);
2972 EmitAggregateCopy(tmp, L.getAddress(), type, L.isVolatile(),
2973 L.getAlignment());
2974 args.add(RValue::getAggregate(tmp), type);
2975 }
Eli Friedmandf968192011-05-26 00:10:27 +00002976 return;
2977 }
2978
John McCall32ea9692011-03-11 20:59:21 +00002979 args.add(EmitAnyExprToTemp(E), type);
Anders Carlsson60ce3fe2009-04-08 20:47:54 +00002980}
2981
Reid Kleckner79b0fd72014-10-10 00:05:45 +00002982QualType CodeGenFunction::getVarArgType(const Expr *Arg) {
2983 // System headers on Windows define NULL to 0 instead of 0LL on Win64. MSVC
2984 // implicitly widens null pointer constants that are arguments to varargs
2985 // functions to pointer-sized ints.
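  // Rough example (illustrative): on Win64, printf("%p", NULL) passes the
  // 32-bit literal 0; promoting its type to intptr_t here widens it so the
  // upper half of the 64-bit argument slot is not left undefined.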
2986 if (!getTarget().getTriple().isOSWindows())
2987 return Arg->getType();
2988
2989 if (Arg->getType()->isIntegerType() &&
2990 getContext().getTypeSize(Arg->getType()) <
2991 getContext().getTargetInfo().getPointerWidth(0) &&
2992 Arg->isNullPointerConstant(getContext(),
2993 Expr::NPC_ValueDependentIsNotNull)) {
2994 return getContext().getIntPtrType();
2995 }
2996
2997 return Arg->getType();
2998}
2999
Dan Gohman515a60d2012-02-16 00:57:37 +00003000// In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
3001// optimizer it can aggressively ignore unwind edges.
3002void
3003CodeGenFunction::AddObjCARCExceptionMetadata(llvm::Instruction *Inst) {
3004 if (CGM.getCodeGenOpts().OptimizationLevel != 0 &&
3005 !CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
3006 Inst->setMetadata("clang.arc.no_objc_arc_exceptions",
3007 CGM.getNoObjCARCExceptionsMetadata());
3008}
3009
John McCall882987f2013-02-28 19:01:20 +00003010/// Emits a call to the given no-arguments nounwind runtime function.
3011llvm::CallInst *
3012CodeGenFunction::EmitNounwindRuntimeCall(llvm::Value *callee,
3013 const llvm::Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003014 return EmitNounwindRuntimeCall(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003015}
3016
3017/// Emits a call to the given nounwind runtime function.
3018llvm::CallInst *
3019CodeGenFunction::EmitNounwindRuntimeCall(llvm::Value *callee,
3020 ArrayRef<llvm::Value*> args,
3021 const llvm::Twine &name) {
3022 llvm::CallInst *call = EmitRuntimeCall(callee, args, name);
3023 call->setDoesNotThrow();
3024 return call;
3025}
3026
3027/// Emits a simple call (never an invoke) to the given no-arguments
3028/// runtime function.
3029llvm::CallInst *
3030CodeGenFunction::EmitRuntimeCall(llvm::Value *callee,
3031 const llvm::Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003032 return EmitRuntimeCall(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003033}
3034
3035/// Emits a simple call (never an invoke) to the given runtime
3036/// function.
3037llvm::CallInst *
3038CodeGenFunction::EmitRuntimeCall(llvm::Value *callee,
3039 ArrayRef<llvm::Value*> args,
3040 const llvm::Twine &name) {
3041 llvm::CallInst *call = Builder.CreateCall(callee, args, name);
3042 call->setCallingConv(getRuntimeCC());
3043 return call;
3044}
3045
3046/// Emits a call or invoke to the given noreturn runtime function.
3047void CodeGenFunction::EmitNoreturnRuntimeCallOrInvoke(llvm::Value *callee,
3048 ArrayRef<llvm::Value*> args) {
3049 if (getInvokeDest()) {
3050 llvm::InvokeInst *invoke =
3051 Builder.CreateInvoke(callee,
3052 getUnreachableBlock(),
3053 getInvokeDest(),
3054 args);
3055 invoke->setDoesNotReturn();
3056 invoke->setCallingConv(getRuntimeCC());
3057 } else {
3058 llvm::CallInst *call = Builder.CreateCall(callee, args);
3059 call->setDoesNotReturn();
3060 call->setCallingConv(getRuntimeCC());
3061 Builder.CreateUnreachable();
3062 }
3063}
3064
3065/// Emits a call or invoke instruction to the given nullary runtime
3066/// function.
3067llvm::CallSite
3068CodeGenFunction::EmitRuntimeCallOrInvoke(llvm::Value *callee,
3069 const Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003070 return EmitRuntimeCallOrInvoke(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003071}
3072
3073/// Emits a call or invoke instruction to the given runtime function.
3074llvm::CallSite
3075CodeGenFunction::EmitRuntimeCallOrInvoke(llvm::Value *callee,
3076 ArrayRef<llvm::Value*> args,
3077 const Twine &name) {
3078 llvm::CallSite callSite = EmitCallOrInvoke(callee, args, name);
3079 callSite.setCallingConv(getRuntimeCC());
3080 return callSite;
3081}
3082
3083llvm::CallSite
3084CodeGenFunction::EmitCallOrInvoke(llvm::Value *Callee,
3085 const Twine &Name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003086 return EmitCallOrInvoke(Callee, None, Name);
John McCall882987f2013-02-28 19:01:20 +00003087}
3088
John McCallbd309292010-07-06 01:34:17 +00003089/// Emits a call or invoke instruction to the given function, depending
3090/// on the current state of the EH stack.
3091llvm::CallSite
3092CodeGenFunction::EmitCallOrInvoke(llvm::Value *Callee,
Chris Lattner54b16772011-07-23 17:14:25 +00003093 ArrayRef<llvm::Value *> Args,
Chris Lattner0e62c1c2011-07-23 10:55:15 +00003094 const Twine &Name) {
John McCallbd309292010-07-06 01:34:17 +00003095 llvm::BasicBlock *InvokeDest = getInvokeDest();
John McCallbd309292010-07-06 01:34:17 +00003096
Dan Gohman515a60d2012-02-16 00:57:37 +00003097 llvm::Instruction *Inst;
3098 if (!InvokeDest)
3099 Inst = Builder.CreateCall(Callee, Args, Name);
3100 else {
3101 llvm::BasicBlock *ContBB = createBasicBlock("invoke.cont");
3102 Inst = Builder.CreateInvoke(Callee, ContBB, InvokeDest, Args, Name);
3103 EmitBlock(ContBB);
3104 }
3105
3106 // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
3107 // optimizer it can aggressively ignore unwind edges.
David Blaikiebbafb8a2012-03-11 07:00:24 +00003108 if (CGM.getLangOpts().ObjCAutoRefCount)
Dan Gohman515a60d2012-02-16 00:57:37 +00003109 AddObjCARCExceptionMetadata(Inst);
3110
Benjamin Kramerc19cde12015-04-10 14:49:31 +00003111 return llvm::CallSite(Inst);
John McCallbd309292010-07-06 01:34:17 +00003112}
3113
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003114/// \brief Store a non-aggregate value to an address to initialize it. For
3115/// initialization, a non-atomic store will be used.
3116static void EmitInitStoreOfNonAggregate(CodeGenFunction &CGF, RValue Src,
3117 LValue Dst) {
3118 if (Src.isScalar())
3119 CGF.EmitStoreOfScalar(Src.getScalarVal(), Dst, /*init=*/true);
3120 else
3121 CGF.EmitStoreOfComplex(Src.getComplexVal(), Dst, /*init=*/true);
3122}
3123
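/// Queue up a replacement of the placeholder instruction Old with the value
/// New; the replacement is recorded here and applied later.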
3124void CodeGenFunction::deferPlaceholderReplacement(llvm::Instruction *Old,
3125 llvm::Value *New) {
3126 DeferredReplacements.push_back(std::make_pair(Old, New));
3127}
Chris Lattnerd59d8672011-07-12 06:29:11 +00003128
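/// Emit a call or invoke to the given callee, lowering the arguments in
/// CallArgs according to the ABI information in CallInfo and converting the
/// result back into an RValue.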
Daniel Dunbard931a872009-02-02 22:03:45 +00003129RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
Mike Stump11289f42009-09-09 15:08:12 +00003130 llvm::Value *Callee,
Anders Carlsson61a401c2009-12-24 19:25:24 +00003131 ReturnValueSlot ReturnValue,
Daniel Dunbarcdbb5e32009-02-20 18:06:48 +00003132 const CallArgList &CallArgs,
David Chisnall9eecafa2010-05-01 11:15:56 +00003133 const Decl *TargetDecl,
David Chisnallff5f88c2010-05-02 13:41:58 +00003134 llvm::Instruction **callOrInvoke) {
Mike Stump18bb9282009-05-16 07:57:57 +00003135 // FIXME: We no longer need the types from CallArgs; lift up and simplify.
Daniel Dunbar613855c2008-09-09 23:27:19 +00003136
3137 // Handle struct-return functions by passing a pointer to the
3138 // location that we would like to return into.
Daniel Dunbar7633cbf2009-02-02 21:43:58 +00003139 QualType RetTy = CallInfo.getReturnType();
Daniel Dunbarb52d0772009-02-03 05:59:18 +00003140 const ABIArgInfo &RetAI = CallInfo.getReturnInfo();
Mike Stump11289f42009-09-09 15:08:12 +00003141
Chris Lattnerbb1952c2011-07-12 04:46:18 +00003142 llvm::FunctionType *IRFuncTy =
3143 cast<llvm::FunctionType>(
3144 cast<llvm::PointerType>(Callee->getType())->getElementType());
Mike Stump11289f42009-09-09 15:08:12 +00003145
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003146 // If we're using inalloca, insert the allocation after the stack save.
3147 // FIXME: Do this earlier rather than hacking it in here!
David Blaikie1ed728c2015-04-05 22:45:47 +00003148 llvm::AllocaInst *ArgMemory = nullptr;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003149 if (llvm::StructType *ArgStruct = CallInfo.getArgStruct()) {
Reid Kleckner9df1d972014-04-10 01:40:15 +00003150 llvm::Instruction *IP = CallArgs.getStackBase();
3151 llvm::AllocaInst *AI;
3152 if (IP) {
3153 IP = IP->getNextNode();
3154 AI = new llvm::AllocaInst(ArgStruct, "argmem", IP);
3155 } else {
Reid Kleckner966abe72014-05-15 23:01:46 +00003156 AI = CreateTempAlloca(ArgStruct, "argmem");
Reid Kleckner9df1d972014-04-10 01:40:15 +00003157 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003158 AI->setUsedWithInAlloca(true);
3159 assert(AI->isUsedWithInAlloca() && !AI->isStaticAlloca());
3160 ArgMemory = AI;
3161 }
3162
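  // Compute how the Clang-level arguments map onto the LLVM IR argument list.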
Alexey Samsonov153004f2014-09-29 22:08:00 +00003163 ClangToLLVMArgMapping IRFunctionArgs(CGM.getContext(), CallInfo);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003164 SmallVector<llvm::Value *, 16> IRCallArgs(IRFunctionArgs.totalIRArgs());
3165
Chris Lattner4ca97c32009-06-13 00:26:38 +00003166 // If the call returns a temporary with struct return, create a temporary
Anders Carlsson17490832009-12-24 20:40:36 +00003167 // alloca to hold the result, unless one is given to us.
Craig Topper8a13c412014-05-21 05:09:00 +00003168 llvm::Value *SRetPtr = nullptr;
Leny Kholodov6aab1112015-06-08 10:23:49 +00003169 size_t UnusedReturnSize = 0;
Reid Kleckner37abaca2014-05-09 22:46:15 +00003170 if (RetAI.isIndirect() || RetAI.isInAlloca()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003171 SRetPtr = ReturnValue.getValue();
Leny Kholodov6aab1112015-06-08 10:23:49 +00003172 if (!SRetPtr) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003173 SRetPtr = CreateMemTemp(RetTy);
Leny Kholodov6aab1112015-06-08 10:23:49 +00003174 if (HaveInsertPoint() && ReturnValue.isUnused()) {
3175 uint64_t size =
3176 CGM.getDataLayout().getTypeAllocSize(ConvertTypeForMem(RetTy));
3177 if (EmitLifetimeStart(size, SRetPtr))
3178 UnusedReturnSize = size;
3179 }
3180 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003181 if (IRFunctionArgs.hasSRetArg()) {
3182 IRCallArgs[IRFunctionArgs.getSRetArgNo()] = SRetPtr;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003183 } else {
3184 llvm::Value *Addr =
David Blaikie2e804282015-04-05 22:47:07 +00003185 Builder.CreateStructGEP(ArgMemory->getAllocatedType(), ArgMemory,
3186 RetAI.getInAllocaFieldIndex());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003187 Builder.CreateStore(SRetPtr, Addr);
3188 }
Anders Carlsson17490832009-12-24 20:40:36 +00003189 }
Mike Stump11289f42009-09-09 15:08:12 +00003190
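  // Lower each call argument to its IR form according to the ABI
  // classification computed for it (inalloca, indirect, ignore,
  // extend/direct, or expand).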
Daniel Dunbara45bdbb2009-02-04 21:17:21 +00003191 assert(CallInfo.arg_size() == CallArgs.size() &&
3192 "Mismatch between function signature & arguments.");
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003193 unsigned ArgNo = 0;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00003194 CGFunctionInfo::const_arg_iterator info_it = CallInfo.arg_begin();
Mike Stump11289f42009-09-09 15:08:12 +00003195 for (CallArgList::const_iterator I = CallArgs.begin(), E = CallArgs.end();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003196 I != E; ++I, ++info_it, ++ArgNo) {
Daniel Dunbarb52d0772009-02-03 05:59:18 +00003197 const ABIArgInfo &ArgInfo = info_it->info;
Eli Friedmanf4258eb2011-05-02 18:05:27 +00003198 RValue RV = I->RV;
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00003199
John McCall47fb9502013-03-07 21:37:08 +00003200 CharUnits TypeAlign = getContext().getTypeAlignInChars(I->Ty);
Rafael Espindolafad28de2012-10-24 01:59:00 +00003201
3202 // Insert a padding argument to ensure proper alignment.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003203 if (IRFunctionArgs.hasPaddingArg(ArgNo))
3204 IRCallArgs[IRFunctionArgs.getPaddingArgNo(ArgNo)] =
3205 llvm::UndefValue::get(ArgInfo.getPaddingType());
3206
3207 unsigned FirstIRArg, NumIRArgs;
3208 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
Rafael Espindolafad28de2012-10-24 01:59:00 +00003209
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00003210 switch (ArgInfo.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003211 case ABIArgInfo::InAlloca: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003212 assert(NumIRArgs == 0);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003213 assert(getTarget().getTriple().getArch() == llvm::Triple::x86);
3214 if (RV.isAggregate()) {
3215 // Replace the placeholder with the appropriate argument slot GEP.
3216 llvm::Instruction *Placeholder =
3217 cast<llvm::Instruction>(RV.getAggregateAddr());
3218 CGBuilderTy::InsertPoint IP = Builder.saveIP();
3219 Builder.SetInsertPoint(Placeholder);
David Blaikie2e804282015-04-05 22:47:07 +00003220 llvm::Value *Addr =
3221 Builder.CreateStructGEP(ArgMemory->getAllocatedType(), ArgMemory,
3222 ArgInfo.getInAllocaFieldIndex());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003223 Builder.restoreIP(IP);
3224 deferPlaceholderReplacement(Placeholder, Addr);
3225 } else {
3226 // Store the RValue into the argument struct.
3227 llvm::Value *Addr =
David Blaikie2e804282015-04-05 22:47:07 +00003228 Builder.CreateStructGEP(ArgMemory->getAllocatedType(), ArgMemory,
3229 ArgInfo.getInAllocaFieldIndex());
David Majnemer32b57b02014-03-31 16:12:47 +00003230 unsigned AS = Addr->getType()->getPointerAddressSpace();
3231 llvm::Type *MemType = ConvertTypeForMem(I->Ty)->getPointerTo(AS);
3232 // There are some cases where a trivial bitcast is not avoidable. The
3233        // definition of a type later in a translation unit may change its type
3234 // from {}* to (%struct.foo*)*.
3235 if (Addr->getType() != MemType)
3236 Addr = Builder.CreateBitCast(Addr, MemType);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003237 LValue argLV = MakeAddrLValue(Addr, I->Ty, TypeAlign);
3238 EmitInitStoreOfNonAggregate(*this, RV, argLV);
3239 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003240 break;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003241 }
3242
Daniel Dunbar03816342010-08-21 02:24:36 +00003243 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003244 assert(NumIRArgs == 1);
Daniel Dunbar747865a2009-02-05 09:16:39 +00003245 if (RV.isScalar() || RV.isComplex()) {
3246 // Make a temporary alloca to pass the argument.
Eli Friedman7e68c882011-06-15 18:26:32 +00003247 llvm::AllocaInst *AI = CreateMemTemp(I->Ty);
3248 if (ArgInfo.getIndirectAlign() > AI->getAlignment())
3249 AI->setAlignment(ArgInfo.getIndirectAlign());
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003250 IRCallArgs[FirstIRArg] = AI;
John McCall47fb9502013-03-07 21:37:08 +00003251
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003252 LValue argLV = MakeAddrLValue(AI, I->Ty, TypeAlign);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003253 EmitInitStoreOfNonAggregate(*this, RV, argLV);
Daniel Dunbar747865a2009-02-05 09:16:39 +00003254 } else {
Eli Friedmaneb7fab62011-06-14 01:37:52 +00003255 // We want to avoid creating an unnecessary temporary+copy here;
Guy Benyei3832bfd2013-03-10 12:59:00 +00003256 // however, we need one in three cases:
Eli Friedmaneb7fab62011-06-14 01:37:52 +00003257 // 1. If the argument is not byval, and we are required to copy the
3258 // source. (This case doesn't occur on any common architecture.)
3259 // 2. If the argument is byval, RV is not sufficiently aligned, and
3260 // we cannot force it to be sufficiently aligned.
Guy Benyei3832bfd2013-03-10 12:59:00 +00003261 // 3. If the argument is byval, but RV is located in an address space
3262 // different than that of the argument (0).
Eli Friedmanf7456192011-06-15 22:09:18 +00003263 llvm::Value *Addr = RV.getAggregateAddr();
3264 unsigned Align = ArgInfo.getIndirectAlign();
Micah Villmowdd31ca12012-10-08 16:25:52 +00003265 const llvm::DataLayout *TD = &CGM.getDataLayout();
Guy Benyei3832bfd2013-03-10 12:59:00 +00003266 const unsigned RVAddrSpace = Addr->getType()->getPointerAddressSpace();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003267 const unsigned ArgAddrSpace =
3268 (FirstIRArg < IRFuncTy->getNumParams()
3269 ? IRFuncTy->getParamType(FirstIRArg)->getPointerAddressSpace()
3270 : 0);
Eli Friedmanf7456192011-06-15 22:09:18 +00003271 if ((!ArgInfo.getIndirectByVal() && I->NeedsCopy) ||
John McCall47fb9502013-03-07 21:37:08 +00003272 (ArgInfo.getIndirectByVal() && TypeAlign.getQuantity() < Align &&
Mehdi Aminib3d52092015-03-10 02:36:43 +00003273 llvm::getOrEnforceKnownAlignment(Addr, Align, *TD) < Align) ||
3274 (ArgInfo.getIndirectByVal() && (RVAddrSpace != ArgAddrSpace))) {
Eli Friedmaneb7fab62011-06-14 01:37:52 +00003275 // Create an aligned temporary, and copy to it.
Eli Friedmanf7456192011-06-15 22:09:18 +00003276 llvm::AllocaInst *AI = CreateMemTemp(I->Ty);
3277 if (Align > AI->getAlignment())
3278 AI->setAlignment(Align);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003279 IRCallArgs[FirstIRArg] = AI;
Chad Rosier615ed1a2012-03-29 17:37:10 +00003280 EmitAggregateCopy(AI, Addr, I->Ty, RV.isVolatileQualified());
Eli Friedmaneb7fab62011-06-14 01:37:52 +00003281 } else {
3282 // Skip the extra memcpy call.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003283 IRCallArgs[FirstIRArg] = Addr;
Eli Friedmaneb7fab62011-06-14 01:37:52 +00003284 }
Daniel Dunbar747865a2009-02-05 09:16:39 +00003285 }
3286 break;
Daniel Dunbar03816342010-08-21 02:24:36 +00003287 }
Daniel Dunbar747865a2009-02-05 09:16:39 +00003288
Daniel Dunbar94a6f252009-01-26 21:26:08 +00003289 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003290 assert(NumIRArgs == 0);
Daniel Dunbar94a6f252009-01-26 21:26:08 +00003291 break;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003292
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003293 case ABIArgInfo::Extend:
3294 case ABIArgInfo::Direct: {
3295 if (!isa<llvm::StructType>(ArgInfo.getCoerceToType()) &&
Chris Lattner8a2f3c72010-07-30 04:02:24 +00003296 ArgInfo.getCoerceToType() == ConvertType(info_it->type) &&
3297 ArgInfo.getDirectOffset() == 0) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003298 assert(NumIRArgs == 1);
Chris Lattnerbb1952c2011-07-12 04:46:18 +00003299 llvm::Value *V;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003300 if (RV.isScalar())
Chris Lattnerbb1952c2011-07-12 04:46:18 +00003301 V = RV.getScalarVal();
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003302 else
Chris Lattnerbb1952c2011-07-12 04:46:18 +00003303 V = Builder.CreateLoad(RV.getAggregateAddr());
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003304
Reid Kleckner79b0fd72014-10-10 00:05:45 +00003305 // We might have to widen integers, but we should never truncate.
3306 if (ArgInfo.getCoerceToType() != V->getType() &&
3307 V->getType()->isIntegerTy())
3308 V = Builder.CreateZExt(V, ArgInfo.getCoerceToType());
3309
Chris Lattner3ce86682011-07-12 04:53:39 +00003310 // If the argument doesn't match, perform a bitcast to coerce it. This
3311 // can happen due to trivial type mismatches.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003312 if (FirstIRArg < IRFuncTy->getNumParams() &&
3313 V->getType() != IRFuncTy->getParamType(FirstIRArg))
3314 V = Builder.CreateBitCast(V, IRFuncTy->getParamType(FirstIRArg));
3315 IRCallArgs[FirstIRArg] = V;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003316 break;
3317 }
Daniel Dunbar94a6f252009-01-26 21:26:08 +00003318
Daniel Dunbar2f219b02009-02-03 19:12:28 +00003319 // FIXME: Avoid the conversion through memory if possible.
3320 llvm::Value *SrcPtr;
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003321 CharUnits SrcAlign;
John McCall47fb9502013-03-07 21:37:08 +00003322 if (RV.isScalar() || RV.isComplex()) {
Eli Friedmanf4258eb2011-05-02 18:05:27 +00003323 SrcPtr = CreateMemTemp(I->Ty, "coerce");
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003324 SrcAlign = TypeAlign;
John McCall47fb9502013-03-07 21:37:08 +00003325 LValue SrcLV = MakeAddrLValue(SrcPtr, I->Ty, TypeAlign);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003326 EmitInitStoreOfNonAggregate(*this, RV, SrcLV);
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003327 } else {
Daniel Dunbar2f219b02009-02-03 19:12:28 +00003328 SrcPtr = RV.getAggregateAddr();
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003329 // This alignment is guaranteed by EmitCallArg.
3330 SrcAlign = TypeAlign;
3331 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003332
Chris Lattner8a2f3c72010-07-30 04:02:24 +00003333 // If the value is offset in memory, apply the offset now.
3334 if (unsigned Offs = ArgInfo.getDirectOffset()) {
3335 SrcPtr = Builder.CreateBitCast(SrcPtr, Builder.getInt8PtrTy());
David Blaikie5e259a82015-04-03 22:54:16 +00003336 SrcPtr = Builder.CreateConstGEP1_32(Builder.getInt8Ty(), SrcPtr, Offs);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003337 SrcPtr = Builder.CreateBitCast(SrcPtr,
Chris Lattner8a2f3c72010-07-30 04:02:24 +00003338 llvm::PointerType::getUnqual(ArgInfo.getCoerceToType()));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003339 SrcAlign = SrcAlign.alignmentAtOffset(CharUnits::fromQuantity(Offs));
Chris Lattner8a2f3c72010-07-30 04:02:24 +00003340 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003341
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00003342 // Fast-isel and the optimizer generally like scalar values better than
3343      // FCAs (first-class aggregates), so we flatten them when safe for this argument.
James Molloy6f244b62014-05-09 16:21:39 +00003344 llvm::StructType *STy =
3345 dyn_cast<llvm::StructType>(ArgInfo.getCoerceToType());
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00003346 if (STy && ArgInfo.isDirect() && ArgInfo.getCanBeFlattened()) {
Chandler Carrutha6399a52012-10-10 11:29:08 +00003347 llvm::Type *SrcTy =
3348 cast<llvm::PointerType>(SrcPtr->getType())->getElementType();
3349 uint64_t SrcSize = CGM.getDataLayout().getTypeAllocSize(SrcTy);
3350 uint64_t DstSize = CGM.getDataLayout().getTypeAllocSize(STy);
3351
3352 // If the source type is smaller than the destination type of the
3353 // coerce-to logic, copy the source value into a temp alloca the size
3354 // of the destination type to allow loading all of it. The bits past
3355 // the source value are left undef.
3356 if (SrcSize < DstSize) {
3357 llvm::AllocaInst *TempAlloca
3358 = CreateTempAlloca(STy, SrcPtr->getName() + ".coerce");
3359 Builder.CreateMemCpy(TempAlloca, SrcPtr, SrcSize, 0);
3360 SrcPtr = TempAlloca;
3361 } else {
3362 SrcPtr = Builder.CreateBitCast(SrcPtr,
3363 llvm::PointerType::getUnqual(STy));
3364 }
3365
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003366 assert(NumIRArgs == STy->getNumElements());
Chris Lattnerceddafb2010-07-05 20:41:41 +00003367 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
David Blaikie17ea2662015-04-04 21:07:17 +00003368 llvm::Value *EltPtr = Builder.CreateConstGEP2_32(STy, SrcPtr, 0, i);
Chris Lattnerff941a62010-07-28 18:24:28 +00003369 llvm::LoadInst *LI = Builder.CreateLoad(EltPtr);
3370 // We don't know what we're loading from.
3371 LI->setAlignment(1);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003372 IRCallArgs[FirstIRArg + i] = LI;
Chris Lattner15ec3612010-06-29 00:06:42 +00003373 }
Chris Lattner3dd716c2010-06-28 23:44:11 +00003374 } else {
Chris Lattner15ec3612010-06-29 00:06:42 +00003375 // In the simple case, just pass the coerced loaded value.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003376 assert(NumIRArgs == 1);
3377 IRCallArgs[FirstIRArg] =
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003378 CreateCoercedLoad(SrcPtr, ArgInfo.getCoerceToType(),
3379 SrcAlign, *this);
Chris Lattner3dd716c2010-06-28 23:44:11 +00003380 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003381
Daniel Dunbar2f219b02009-02-03 19:12:28 +00003382 break;
3383 }
3384
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00003385 case ABIArgInfo::Expand:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003386 unsigned IRArgPos = FirstIRArg;
3387 ExpandTypeToArgs(I->Ty, RV, IRFuncTy, IRCallArgs, IRArgPos);
3388 assert(IRArgPos == FirstIRArg + NumIRArgs);
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00003389 break;
Daniel Dunbar613855c2008-09-09 23:27:19 +00003390 }
3391 }
Mike Stump11289f42009-09-09 15:08:12 +00003392
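  // If an inalloca argument block was allocated, pass its address as the
  // inalloca IR argument, bitcasting when the declared types differ.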
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003393 if (ArgMemory) {
3394 llvm::Value *Arg = ArgMemory;
Reid Klecknerafba553e2014-07-08 02:24:27 +00003395 if (CallInfo.isVariadic()) {
3396 // When passing non-POD arguments by value to variadic functions, we will
3397 // end up with a variadic prototype and an inalloca call site. In such
3398 // cases, we can't do any parameter mismatch checks. Give up and bitcast
3399 // the callee.
3400 unsigned CalleeAS =
3401 cast<llvm::PointerType>(Callee->getType())->getAddressSpace();
3402 Callee = Builder.CreateBitCast(
3403 Callee, getTypes().GetFunctionType(CallInfo)->getPointerTo(CalleeAS));
3404 } else {
3405 llvm::Type *LastParamTy =
3406 IRFuncTy->getParamType(IRFuncTy->getNumParams() - 1);
3407 if (Arg->getType() != LastParamTy) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003408#ifndef NDEBUG
Reid Klecknerafba553e2014-07-08 02:24:27 +00003409 // Assert that these structs have equivalent element types.
3410 llvm::StructType *FullTy = CallInfo.getArgStruct();
3411 llvm::StructType *DeclaredTy = cast<llvm::StructType>(
3412 cast<llvm::PointerType>(LastParamTy)->getElementType());
3413 assert(DeclaredTy->getNumElements() == FullTy->getNumElements());
3414 for (llvm::StructType::element_iterator DI = DeclaredTy->element_begin(),
3415 DE = DeclaredTy->element_end(),
3416 FI = FullTy->element_begin();
3417 DI != DE; ++DI, ++FI)
3418 assert(*DI == *FI);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003419#endif
Reid Klecknerafba553e2014-07-08 02:24:27 +00003420 Arg = Builder.CreateBitCast(Arg, LastParamTy);
3421 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003422 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003423 assert(IRFunctionArgs.hasInallocaArg());
3424 IRCallArgs[IRFunctionArgs.getInallocaArgNo()] = Arg;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003425 }
3426
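  // Deactivate any argument cleanups that need to be dropped before the call
  // itself is emitted.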
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003427 if (!CallArgs.getCleanupsToDeactivate().empty())
3428 deactivateArgCleanupsBeforeCall(*this, CallArgs);
3429
Chris Lattner4ca97c32009-06-13 00:26:38 +00003430  // If the callee is a bitcast of a function to a variadic pointer-to-function
3431  // type, check whether we can remove the bitcast. This handles some cases
3432  // with unprototyped functions.
3433 if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(Callee))
3434 if (llvm::Function *CalleeF = dyn_cast<llvm::Function>(CE->getOperand(0))) {
Chris Lattner2192fe52011-07-18 04:24:23 +00003435 llvm::PointerType *CurPT=cast<llvm::PointerType>(Callee->getType());
3436 llvm::FunctionType *CurFT =
Chris Lattner4ca97c32009-06-13 00:26:38 +00003437 cast<llvm::FunctionType>(CurPT->getElementType());
Chris Lattner2192fe52011-07-18 04:24:23 +00003438 llvm::FunctionType *ActualFT = CalleeF->getFunctionType();
Mike Stump11289f42009-09-09 15:08:12 +00003439
Chris Lattner4ca97c32009-06-13 00:26:38 +00003440 if (CE->getOpcode() == llvm::Instruction::BitCast &&
3441 ActualFT->getReturnType() == CurFT->getReturnType() &&
Chris Lattner4c8da962009-06-23 01:38:41 +00003442 ActualFT->getNumParams() == CurFT->getNumParams() &&
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003443 ActualFT->getNumParams() == IRCallArgs.size() &&
Fariborz Jahaniancf7f66f2011-03-01 17:28:13 +00003444 (CurFT->isVarArg() || !ActualFT->isVarArg())) {
Chris Lattner4ca97c32009-06-13 00:26:38 +00003445 bool ArgsMatch = true;
3446 for (unsigned i = 0, e = ActualFT->getNumParams(); i != e; ++i)
3447 if (ActualFT->getParamType(i) != CurFT->getParamType(i)) {
3448 ArgsMatch = false;
3449 break;
3450 }
Mike Stump11289f42009-09-09 15:08:12 +00003451
Chris Lattner4ca97c32009-06-13 00:26:38 +00003452 // Strip the cast if we can get away with it. This is a nice cleanup,
3453 // but also allows us to inline the function at -O0 if it is marked
3454 // always_inline.
3455 if (ArgsMatch)
3456 Callee = CalleeF;
3457 }
3458 }
Mike Stump11289f42009-09-09 15:08:12 +00003459
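  // Sanity-check that the IR arguments match the callee's IR parameter types;
  // only the inalloca argument is allowed to differ.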
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003460 assert(IRCallArgs.size() == IRFuncTy->getNumParams() || IRFuncTy->isVarArg());
3461 for (unsigned i = 0; i < IRCallArgs.size(); ++i) {
3462    // The inalloca argument can have a different type.
3463 if (IRFunctionArgs.hasInallocaArg() &&
3464 i == IRFunctionArgs.getInallocaArgNo())
3465 continue;
3466 if (i < IRFuncTy->getNumParams())
3467 assert(IRCallArgs[i]->getType() == IRFuncTy->getParamType(i));
3468 }
3469
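  // Compute the LLVM attribute list and calling convention for this call site.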
Daniel Dunbar0ef34792009-09-12 00:59:20 +00003470 unsigned CallingConv;
Devang Patel322300d2008-09-25 21:02:23 +00003471 CodeGen::AttributeListType AttributeList;
Bill Wendlingf4d64cb2013-02-22 00:13:35 +00003472 CGM.ConstructAttributeList(CallInfo, TargetDecl, AttributeList,
3473 CallingConv, true);
Bill Wendling3087d022012-12-07 23:17:26 +00003474 llvm::AttributeSet Attrs = llvm::AttributeSet::get(getLLVMContext(),
Bill Wendlingf4d64cb2013-02-22 00:13:35 +00003475 AttributeList);
Mike Stump11289f42009-09-09 15:08:12 +00003476
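  // Fetch an invoke destination if the callee is not known to be nounwind, or
  // if the current function uses SEH __try; otherwise a plain call is emitted.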
Craig Topper8a13c412014-05-21 05:09:00 +00003477 llvm::BasicBlock *InvokeDest = nullptr;
Bill Wendling5e85be42012-12-30 10:32:17 +00003478 if (!Attrs.hasAttribute(llvm::AttributeSet::FunctionIndex,
Reid Klecknere7b3f7c2015-02-11 00:00:21 +00003479 llvm::Attribute::NoUnwind) ||
3480 currentFunctionUsesSEHTry())
John McCallbd309292010-07-06 01:34:17 +00003481 InvokeDest = getInvokeDest();
3482
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003483 llvm::CallSite CS;
John McCallbd309292010-07-06 01:34:17 +00003484 if (!InvokeDest) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003485 CS = Builder.CreateCall(Callee, IRCallArgs);
Daniel Dunbar12347492009-02-23 17:26:39 +00003486 } else {
3487 llvm::BasicBlock *Cont = createBasicBlock("invoke.cont");
Alexey Samsonov91cf4552014-08-22 01:06:06 +00003488 CS = Builder.CreateInvoke(Callee, Cont, InvokeDest, IRCallArgs);
Daniel Dunbar12347492009-02-23 17:26:39 +00003489 EmitBlock(Cont);
Daniel Dunbar5006f4a2009-02-20 18:54:31 +00003490 }
Chris Lattnere70a0072010-06-29 16:40:28 +00003491 if (callOrInvoke)
David Chisnallff5f88c2010-05-02 13:41:58 +00003492 *callOrInvoke = CS.getInstruction();
Daniel Dunbar5006f4a2009-02-20 18:54:31 +00003493
Peter Collingbourne41af7c22014-05-20 17:12:51 +00003494 if (CurCodeDecl && CurCodeDecl->hasAttr<FlattenAttr>() &&
3495 !CS.hasFnAttr(llvm::Attribute::NoInline))
3496 Attrs =
3497 Attrs.addAttribute(getLLVMContext(), llvm::AttributeSet::FunctionIndex,
3498 llvm::Attribute::AlwaysInline);
3499
Reid Klecknera5930002015-02-11 21:40:48 +00003500 // Disable inlining inside SEH __try blocks.
Reid Kleckner11c033e2015-02-12 23:40:45 +00003501 if (isSEHTryScope())
Reid Klecknera5930002015-02-11 21:40:48 +00003502 Attrs =
3503 Attrs.addAttribute(getLLVMContext(), llvm::AttributeSet::FunctionIndex,
3504 llvm::Attribute::NoInline);
3505
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003506 CS.setAttributes(Attrs);
Daniel Dunbar0ef34792009-09-12 00:59:20 +00003507 CS.setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003508
Dan Gohman515a60d2012-02-16 00:57:37 +00003509 // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
3510 // optimizer it can aggressively ignore unwind edges.
David Blaikiebbafb8a2012-03-11 07:00:24 +00003511 if (CGM.getLangOpts().ObjCAutoRefCount)
Dan Gohman515a60d2012-02-16 00:57:37 +00003512 AddObjCARCExceptionMetadata(CS.getInstruction());
3513
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003514 // If the call doesn't return, finish the basic block and clear the
3515 // insertion point; this allows the rest of IRgen to discard
3516 // unreachable code.
3517 if (CS.doesNotReturn()) {
Leny Kholodov6aab1112015-06-08 10:23:49 +00003518 if (UnusedReturnSize)
3519 EmitLifetimeEnd(llvm::ConstantInt::get(Int64Ty, UnusedReturnSize),
3520 SRetPtr);
3521
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003522 Builder.CreateUnreachable();
3523 Builder.ClearInsertionPoint();
Mike Stump11289f42009-09-09 15:08:12 +00003524
Mike Stump18bb9282009-05-16 07:57:57 +00003525 // FIXME: For now, emit a dummy basic block because expr emitters in
3526    // general are not ready to handle emitting expressions at unreachable
3527 // points.
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003528 EnsureInsertPoint();
Mike Stump11289f42009-09-09 15:08:12 +00003529
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003530 // Return a reasonable RValue.
3531 return GetUndefRValue(RetTy);
Mike Stump11289f42009-09-09 15:08:12 +00003532 }
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00003533
3534 llvm::Instruction *CI = CS.getInstruction();
Benjamin Kramerdde0fee2009-10-05 13:47:21 +00003535 if (Builder.isNamePreserving() && !CI->getType()->isVoidTy())
Daniel Dunbar613855c2008-09-09 23:27:19 +00003536 CI->setName("call");
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00003537
John McCall31168b02011-06-15 23:02:42 +00003538 // Emit any writebacks immediately. Arguably this should happen
3539 // after any return-value munging.
3540 if (CallArgs.hasWritebacks())
3541 emitWritebacks(*this, CallArgs);
3542
Nico Weber8cdb3f92015-08-25 18:43:32 +00003543  // The stack cleanup for inalloca arguments has to run outside the normal
3544  // lexical cleanup order, so deactivate it and run it manually here.
3545 CallArgs.freeArgumentMemory(*this);
3546
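  // Convert the IR call result into an RValue according to the return value's
  // ABI classification.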
Hal Finkelee90a222014-09-26 05:04:30 +00003547 RValue Ret = [&] {
3548 switch (RetAI.getKind()) {
3549 case ABIArgInfo::InAlloca:
Leny Kholodov6aab1112015-06-08 10:23:49 +00003550 case ABIArgInfo::Indirect: {
3551 RValue ret = convertTempToRValue(SRetPtr, RetTy, SourceLocation());
3552 if (UnusedReturnSize)
3553 EmitLifetimeEnd(llvm::ConstantInt::get(Int64Ty, UnusedReturnSize),
3554 SRetPtr);
3555 return ret;
3556 }
Daniel Dunbard3674e62008-09-11 01:48:57 +00003557
Hal Finkelee90a222014-09-26 05:04:30 +00003558 case ABIArgInfo::Ignore:
3559      // Even though the ABI ignores the return value, make sure to construct
3560      // an appropriate return value for our caller.
3561 return GetUndefRValue(RetTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003562
Hal Finkelee90a222014-09-26 05:04:30 +00003563 case ABIArgInfo::Extend:
3564 case ABIArgInfo::Direct: {
3565 llvm::Type *RetIRTy = ConvertType(RetTy);
3566 if (RetAI.getCoerceToType() == RetIRTy && RetAI.getDirectOffset() == 0) {
3567 switch (getEvaluationKind(RetTy)) {
3568 case TEK_Complex: {
3569 llvm::Value *Real = Builder.CreateExtractValue(CI, 0);
3570 llvm::Value *Imag = Builder.CreateExtractValue(CI, 1);
3571 return RValue::getComplex(std::make_pair(Real, Imag));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003572 }
Hal Finkelee90a222014-09-26 05:04:30 +00003573 case TEK_Aggregate: {
3574 llvm::Value *DestPtr = ReturnValue.getValue();
3575 bool DestIsVolatile = ReturnValue.isVolatile();
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003576 CharUnits DestAlign = getContext().getTypeAlignInChars(RetTy);
Hal Finkelee90a222014-09-26 05:04:30 +00003577
3578 if (!DestPtr) {
3579 DestPtr = CreateMemTemp(RetTy, "agg.tmp");
3580 DestIsVolatile = false;
3581 }
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003582 BuildAggStore(*this, CI, DestPtr, DestIsVolatile, DestAlign);
Hal Finkelee90a222014-09-26 05:04:30 +00003583 return RValue::getAggregate(DestPtr);
3584 }
3585 case TEK_Scalar: {
3586        // If the return value doesn't match its expected IR type, perform a bitcast
3587        // to coerce it. This can happen due to trivial type mismatches.
3588 llvm::Value *V = CI;
3589 if (V->getType() != RetIRTy)
3590 V = Builder.CreateBitCast(V, RetIRTy);
3591 return RValue::get(V);
3592 }
3593 }
3594 llvm_unreachable("bad evaluation kind");
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003595 }
Hal Finkelee90a222014-09-26 05:04:30 +00003596
3597 llvm::Value *DestPtr = ReturnValue.getValue();
3598 bool DestIsVolatile = ReturnValue.isVolatile();
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003599 CharUnits DestAlign = getContext().getTypeAlignInChars(RetTy);
Hal Finkelee90a222014-09-26 05:04:30 +00003600
3601 if (!DestPtr) {
3602 DestPtr = CreateMemTemp(RetTy, "coerce");
3603 DestIsVolatile = false;
John McCall47fb9502013-03-07 21:37:08 +00003604 }
Hal Finkelee90a222014-09-26 05:04:30 +00003605
3606 // If the value is offset in memory, apply the offset now.
3607 llvm::Value *StorePtr = DestPtr;
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003608 CharUnits StoreAlign = DestAlign;
Hal Finkelee90a222014-09-26 05:04:30 +00003609 if (unsigned Offs = RetAI.getDirectOffset()) {
3610 StorePtr = Builder.CreateBitCast(StorePtr, Builder.getInt8PtrTy());
David Blaikie5e259a82015-04-03 22:54:16 +00003611 StorePtr =
3612 Builder.CreateConstGEP1_32(Builder.getInt8Ty(), StorePtr, Offs);
Hal Finkelee90a222014-09-26 05:04:30 +00003613 StorePtr = Builder.CreateBitCast(StorePtr,
3614 llvm::PointerType::getUnqual(RetAI.getCoerceToType()));
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003615 StoreAlign =
3616 StoreAlign.alignmentAtOffset(CharUnits::fromQuantity(Offs));
John McCall47fb9502013-03-07 21:37:08 +00003617 }
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00003618 CreateCoercedStore(CI, StorePtr, DestIsVolatile, StoreAlign, *this);
Hal Finkelee90a222014-09-26 05:04:30 +00003619
3620 return convertTempToRValue(DestPtr, RetTy, SourceLocation());
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00003621 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003622
Hal Finkelee90a222014-09-26 05:04:30 +00003623 case ABIArgInfo::Expand:
3624 llvm_unreachable("Invalid ABI kind for return argument");
Anders Carlsson17490832009-12-24 20:40:36 +00003625 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003626
Hal Finkelee90a222014-09-26 05:04:30 +00003627 llvm_unreachable("Unhandled ABIArgInfo::Kind");
3628 } ();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003629
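  // If the callee was declared with the assume_aligned attribute, emit an
  // alignment assumption for the returned pointer.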
Hal Finkelee90a222014-09-26 05:04:30 +00003630 if (Ret.isScalar() && TargetDecl) {
3631 if (const auto *AA = TargetDecl->getAttr<AssumeAlignedAttr>()) {
3632 llvm::Value *OffsetValue = nullptr;
3633 if (const auto *Offset = AA->getOffset())
3634 OffsetValue = EmitScalarExpr(Offset);
3635
3636 llvm::Value *Alignment = EmitScalarExpr(AA->getAlignment());
3637 llvm::ConstantInt *AlignmentCI = cast<llvm::ConstantInt>(Alignment);
3638 EmitAlignmentAssumption(Ret.getScalarVal(), AlignmentCI->getZExtValue(),
3639 OffsetValue);
3640 }
Daniel Dunbar573884e2008-09-10 07:04:09 +00003641 }
Daniel Dunbard3674e62008-09-11 01:48:57 +00003642
Hal Finkelee90a222014-09-26 05:04:30 +00003643 return Ret;
Daniel Dunbar613855c2008-09-09 23:27:19 +00003644}
Daniel Dunbar2d0746f2009-02-10 20:44:09 +00003645
3646/* VarArg handling */
3647
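/// Emit a va_arg for the given type by delegating to the target's ABIInfo.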
3648llvm::Value *CodeGenFunction::EmitVAArg(llvm::Value *VAListAddr, QualType Ty) {
3649 return CGM.getTypes().getABIInfo().EmitVAArg(VAListAddr, Ty, *this);
3650}