//===--- CGCall.cpp - Encapsulate calling convention details --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes wrap the information about a call or function
// definition used to handle ABI compliancy.
//
//===----------------------------------------------------------------------===//

#include "CGCall.h"
#include "ABIInfo.h"
#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/SwiftCallingConv.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace clang;
using namespace CodeGen;

/***/

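/// Map a Clang AST calling convention to the corresponding LLVM IR calling
/// convention; conventions with no LLVM counterpart (e.g. __pascal) fall back
/// to the C convention.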
unsigned CodeGenTypes::ClangCallConvToLLVMCallConv(CallingConv CC) {
  switch (CC) {
  default: return llvm::CallingConv::C;
  case CC_X86StdCall: return llvm::CallingConv::X86_StdCall;
  case CC_X86FastCall: return llvm::CallingConv::X86_FastCall;
  case CC_X86RegCall: return llvm::CallingConv::X86_RegCall;
  case CC_X86ThisCall: return llvm::CallingConv::X86_ThisCall;
  case CC_Win64: return llvm::CallingConv::Win64;
  case CC_X86_64SysV: return llvm::CallingConv::X86_64_SysV;
  case CC_AAPCS: return llvm::CallingConv::ARM_AAPCS;
  case CC_AAPCS_VFP: return llvm::CallingConv::ARM_AAPCS_VFP;
  case CC_IntelOclBicc: return llvm::CallingConv::Intel_OCL_BI;
  // TODO: Add support for __pascal to LLVM.
  case CC_X86Pascal: return llvm::CallingConv::C;
  // TODO: Add support for __vectorcall to LLVM.
  case CC_X86VectorCall: return llvm::CallingConv::X86_VectorCall;
  case CC_AArch64VectorCall: return llvm::CallingConv::AArch64_VectorCall;
  case CC_SpirFunction: return llvm::CallingConv::SPIR_FUNC;
  case CC_OpenCLKernel: return CGM.getTargetCodeGenInfo().getOpenCLKernelCallingConv();
  case CC_PreserveMost: return llvm::CallingConv::PreserveMost;
  case CC_PreserveAll: return llvm::CallingConv::PreserveAll;
  case CC_Swift: return llvm::CallingConv::Swift;
  }
}

/// Derives the 'this' type for codegen purposes, i.e. ignoring method CVR
/// qualification. Either or both of RD and MD may be null. A null RD indicates
/// that there is no meaningful 'this' type, and a null MD can occur when
/// calling a method pointer.
CanQualType CodeGenTypes::DeriveThisType(const CXXRecordDecl *RD,
                                         const CXXMethodDecl *MD) {
  QualType RecTy;
  if (RD)
    RecTy = Context.getTagDeclType(RD)->getCanonicalTypeInternal();
  else
    RecTy = Context.VoidTy;

  if (MD)
    RecTy = Context.getAddrSpaceQualType(
        RecTy, MD->getMethodQualifiers().getAddressSpace());
  return Context.getPointerType(CanQualType::CreateUnsafe(RecTy));
}

/// Returns the canonical formal type of the given C++ method.
static CanQual<FunctionProtoType> GetFormalType(const CXXMethodDecl *MD) {
  return MD->getType()->getCanonicalTypeUnqualified()
           .getAs<FunctionProtoType>();
}

/// Returns the "extra-canonicalized" return type, which discards
/// qualifiers on the return type. Codegen doesn't care about them,
/// and it makes ABI code a little easier to be able to assume that
/// all parameter and return types are top-level unqualified.
static CanQualType GetReturnType(QualType RetTy) {
  return RetTy->getCanonicalTypeUnqualified().getUnqualifiedType();
}

/// Arrange the argument and result information for a value of the given
/// unprototyped freestanding function type.
const CGFunctionInfo &
CodeGenTypes::arrangeFreeFunctionType(CanQual<FunctionNoProtoType> FTNP) {
  // When translating an unprototyped function type, always use a
  // variadic type.
  return arrangeLLVMFunctionInfo(FTNP->getReturnType().getUnqualifiedType(),
                                 /*instanceMethod=*/false,
                                 /*chainCall=*/false, None,
                                 FTNP->getExtInfo(), {}, RequiredArgs(0));
}

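/// Append the ext-parameter infos of the given prototype to paramInfos,
/// padding with default infos for the prefix arguments before it and for any
/// variadic/suffix arguments after it, and inserting an extra entry for each
/// pass_object_size parameter.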
static void addExtParameterInfosForCall(
    llvm::SmallVectorImpl<FunctionProtoType::ExtParameterInfo> &paramInfos,
    const FunctionProtoType *proto, unsigned prefixArgs, unsigned totalArgs) {
  assert(proto->hasExtParameterInfos());
  assert(paramInfos.size() <= prefixArgs);
  assert(proto->getNumParams() + prefixArgs <= totalArgs);

  paramInfos.reserve(totalArgs);

  // Add default infos for any prefix args that don't already have infos.
  paramInfos.resize(prefixArgs);

  // Add infos for the prototype.
  for (const auto &ParamInfo : proto->getExtParameterInfos()) {
    paramInfos.push_back(ParamInfo);
    // pass_object_size params have no parameter info.
    if (ParamInfo.hasPassObjectSize())
      paramInfos.emplace_back();
  }

  assert(paramInfos.size() <= totalArgs &&
         "Did we forget to insert pass_object_size args?");
  // Add default infos for the variadic and/or suffix arguments.
  paramInfos.resize(totalArgs);
}

/// Adds the formal parameters in FPT to the given prefix. If any parameter in
/// FPT has pass_object_size attrs, then we'll add parameters for those, too.
static void appendParameterTypes(
    const CodeGenTypes &CGT, SmallVectorImpl<CanQualType> &prefix,
    SmallVectorImpl<FunctionProtoType::ExtParameterInfo> &paramInfos,
    CanQual<FunctionProtoType> FPT) {
  // Fast path: don't touch param info if we don't need to.
  if (!FPT->hasExtParameterInfos()) {
    assert(paramInfos.empty() &&
           "We have paramInfos, but the prototype doesn't?");
    prefix.append(FPT->param_type_begin(), FPT->param_type_end());
    return;
  }

  unsigned PrefixSize = prefix.size();
  // In the vast majority of cases, we'll have precisely FPT->getNumParams()
  // parameters; the only thing that can change this is the presence of
  // pass_object_size. So, we preallocate for the common case.
  prefix.reserve(prefix.size() + FPT->getNumParams());

  auto ExtInfos = FPT->getExtParameterInfos();
  assert(ExtInfos.size() == FPT->getNumParams());
  for (unsigned I = 0, E = FPT->getNumParams(); I != E; ++I) {
    prefix.push_back(FPT->getParamType(I));
    if (ExtInfos[I].hasPassObjectSize())
      prefix.push_back(CGT.getContext().getSizeType());
  }

  addExtParameterInfosForCall(paramInfos, FPT.getTypePtr(), PrefixSize,
                              prefix.size());
}

/// Arrange the LLVM function layout for a value of the given function
/// type, on top of any implicit parameters already stored.
static const CGFunctionInfo &
arrangeLLVMFunctionInfo(CodeGenTypes &CGT, bool instanceMethod,
                        SmallVectorImpl<CanQualType> &prefix,
                        CanQual<FunctionProtoType> FTP) {
  SmallVector<FunctionProtoType::ExtParameterInfo, 16> paramInfos;
  RequiredArgs Required = RequiredArgs::forPrototypePlus(FTP, prefix.size());
  // FIXME: Kill copy.
  appendParameterTypes(CGT, prefix, paramInfos, FTP);
  CanQualType resultType = FTP->getReturnType().getUnqualifiedType();

  return CGT.arrangeLLVMFunctionInfo(resultType, instanceMethod,
                                     /*chainCall=*/false, prefix,
                                     FTP->getExtInfo(), paramInfos,
                                     Required);
}

/// Arrange the argument and result information for a value of the
/// given freestanding function type.
const CGFunctionInfo &
CodeGenTypes::arrangeFreeFunctionType(CanQual<FunctionProtoType> FTP) {
  SmallVector<CanQualType, 16> argTypes;
  return ::arrangeLLVMFunctionInfo(*this, /*instanceMethod=*/false, argTypes,
                                   FTP);
}

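/// Determine the calling convention for a declaration from its
/// calling-convention attributes, defaulting to the C convention.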
static CallingConv getCallingConventionForDecl(const Decl *D, bool IsWindows) {
  // Set the appropriate calling convention for the Function.
  if (D->hasAttr<StdCallAttr>())
    return CC_X86StdCall;

  if (D->hasAttr<FastCallAttr>())
    return CC_X86FastCall;

  if (D->hasAttr<RegCallAttr>())
    return CC_X86RegCall;

  if (D->hasAttr<ThisCallAttr>())
    return CC_X86ThisCall;

  if (D->hasAttr<VectorCallAttr>())
    return CC_X86VectorCall;

  if (D->hasAttr<PascalAttr>())
    return CC_X86Pascal;

  if (PcsAttr *PCS = D->getAttr<PcsAttr>())
    return (PCS->getPCS() == PcsAttr::AAPCS ? CC_AAPCS : CC_AAPCS_VFP);

  if (D->hasAttr<AArch64VectorPcsAttr>())
    return CC_AArch64VectorCall;

  if (D->hasAttr<IntelOclBiccAttr>())
    return CC_IntelOclBicc;

  if (D->hasAttr<MSABIAttr>())
    return IsWindows ? CC_C : CC_Win64;

  if (D->hasAttr<SysVABIAttr>())
    return IsWindows ? CC_X86_64SysV : CC_C;

  if (D->hasAttr<PreserveMostAttr>())
    return CC_PreserveMost;

  if (D->hasAttr<PreserveAllAttr>())
    return CC_PreserveAll;

  return CC_C;
}

/// Arrange the argument and result information for a call to an
/// unknown C++ non-static member function of the given abstract type.
/// (A null RD means we don't have any meaningful "this" argument type,
///  so fall back to a generic pointer type).
/// The member function must be an ordinary function, i.e. not a
/// constructor or destructor.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXMethodType(const CXXRecordDecl *RD,
                                   const FunctionProtoType *FTP,
                                   const CXXMethodDecl *MD) {
  SmallVector<CanQualType, 16> argTypes;

  // Add the 'this' pointer.
  argTypes.push_back(DeriveThisType(RD, MD));

  return ::arrangeLLVMFunctionInfo(
      *this, true, argTypes,
      FTP->getCanonicalTypeUnqualified().getAs<FunctionProtoType>());
}

/// Set calling convention for CUDA/HIP kernel.
static void setCUDAKernelCallingConvention(CanQualType &FTy, CodeGenModule &CGM,
                                           const FunctionDecl *FD) {
  if (FD->hasAttr<CUDAGlobalAttr>()) {
    const FunctionType *FT = FTy->getAs<FunctionType>();
    CGM.getTargetCodeGenInfo().setCUDAKernelCallingConvention(FT);
    FTy = FT->getCanonicalTypeUnqualified();
  }
}

/// Arrange the argument and result information for a declaration or
/// definition of the given C++ non-static member function. The
/// member function must be an ordinary function, i.e. not a
/// constructor or destructor.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXMethodDeclaration(const CXXMethodDecl *MD) {
  assert(!isa<CXXConstructorDecl>(MD) && "wrong method for constructors!");
  assert(!isa<CXXDestructorDecl>(MD) && "wrong method for destructors!");

  CanQualType FT = GetFormalType(MD).getAs<Type>();
  setCUDAKernelCallingConvention(FT, CGM, MD);
  auto prototype = FT.getAs<FunctionProtoType>();

  if (MD->isInstance()) {
    // The abstract case is perfectly fine.
    const CXXRecordDecl *ThisType = TheCXXABI.getThisArgumentTypeForMethod(MD);
    return arrangeCXXMethodType(ThisType, prototype.getTypePtr(), MD);
  }

  return arrangeFreeFunctionType(prototype);
}

bool CodeGenTypes::inheritingCtorHasParams(
    const InheritedConstructor &Inherited, CXXCtorType Type) {
  // Parameters are unnecessary if we're constructing a base class subobject
  // and the inherited constructor lives in a virtual base.
  return Type == Ctor_Complete ||
         !Inherited.getShadowDecl()->constructsVirtualBase() ||
         !Target.getCXXABI().hasConstructorVariants();
}

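/// Arrange the argument and result information for a declaration or
/// definition of the given C++ constructor or destructor variant.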
const CGFunctionInfo &
CodeGenTypes::arrangeCXXStructorDeclaration(GlobalDecl GD) {
  auto *MD = cast<CXXMethodDecl>(GD.getDecl());

  SmallVector<CanQualType, 16> argTypes;
  SmallVector<FunctionProtoType::ExtParameterInfo, 16> paramInfos;
  argTypes.push_back(DeriveThisType(MD->getParent(), MD));

  bool PassParams = true;

  if (auto *CD = dyn_cast<CXXConstructorDecl>(MD)) {
    // A base class inheriting constructor doesn't get forwarded arguments
    // needed to construct a virtual base (or base class thereof).
    if (auto Inherited = CD->getInheritedConstructor())
      PassParams = inheritingCtorHasParams(Inherited, GD.getCtorType());
  }

  CanQual<FunctionProtoType> FTP = GetFormalType(MD);

  // Add the formal parameters.
  if (PassParams)
    appendParameterTypes(*this, argTypes, paramInfos, FTP);

  CGCXXABI::AddedStructorArgs AddedArgs =
      TheCXXABI.buildStructorSignature(GD, argTypes);
  if (!paramInfos.empty()) {
    // Note: prefix implies after the first param.
    if (AddedArgs.Prefix)
      paramInfos.insert(paramInfos.begin() + 1, AddedArgs.Prefix,
                        FunctionProtoType::ExtParameterInfo{});
    if (AddedArgs.Suffix)
      paramInfos.append(AddedArgs.Suffix,
                        FunctionProtoType::ExtParameterInfo{});
  }

  RequiredArgs required =
      (PassParams && MD->isVariadic() ? RequiredArgs(argTypes.size())
                                      : RequiredArgs::All);

  FunctionType::ExtInfo extInfo = FTP->getExtInfo();
  CanQualType resultType = TheCXXABI.HasThisReturn(GD)
                               ? argTypes.front()
                               : TheCXXABI.hasMostDerivedReturn(GD)
                                     ? CGM.getContext().VoidPtrTy
                                     : Context.VoidTy;
  return arrangeLLVMFunctionInfo(resultType, /*instanceMethod=*/true,
                                 /*chainCall=*/false, argTypes, extInfo,
                                 paramInfos, required);
}

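/// Collect the canonical parameter types for the arguments of a call.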
static SmallVector<CanQualType, 16>
getArgTypesForCall(ASTContext &ctx, const CallArgList &args) {
  SmallVector<CanQualType, 16> argTypes;
  for (auto &arg : args)
    argTypes.push_back(ctx.getCanonicalParamType(arg.Ty));
  return argTypes;
}

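/// Collect the canonical parameter types for the parameters of a declaration.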
static SmallVector<CanQualType, 16>
getArgTypesForDeclaration(ASTContext &ctx, const FunctionArgList &args) {
  SmallVector<CanQualType, 16> argTypes;
  for (auto &arg : args)
    argTypes.push_back(ctx.getCanonicalParamType(arg->getType()));
  return argTypes;
}

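/// Build the ext-parameter info list for a call to a function with the given
/// prototype, or an empty list if the prototype carries no such info.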
static llvm::SmallVector<FunctionProtoType::ExtParameterInfo, 16>
getExtParameterInfosForCall(const FunctionProtoType *proto,
                            unsigned prefixArgs, unsigned totalArgs) {
  llvm::SmallVector<FunctionProtoType::ExtParameterInfo, 16> result;
  if (proto->hasExtParameterInfos()) {
    addExtParameterInfosForCall(result, proto, prefixArgs, totalArgs);
  }
  return result;
}

/// Arrange a call to a C++ method, passing the given arguments.
///
/// ExtraPrefixArgs is the number of ABI-specific args passed after the `this`
/// parameter.
/// ExtraSuffixArgs is the number of ABI-specific args passed at the end of
/// args.
/// PassProtoArgs indicates whether `args` has args for the parameters in the
/// given CXXConstructorDecl.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXConstructorCall(const CallArgList &args,
                                        const CXXConstructorDecl *D,
                                        CXXCtorType CtorKind,
                                        unsigned ExtraPrefixArgs,
                                        unsigned ExtraSuffixArgs,
                                        bool PassProtoArgs) {
  // FIXME: Kill copy.
  SmallVector<CanQualType, 16> ArgTypes;
  for (const auto &Arg : args)
    ArgTypes.push_back(Context.getCanonicalParamType(Arg.Ty));

  // +1 for implicit this, which should always be args[0].
  unsigned TotalPrefixArgs = 1 + ExtraPrefixArgs;

  CanQual<FunctionProtoType> FPT = GetFormalType(D);
  RequiredArgs Required = PassProtoArgs
                              ? RequiredArgs::forPrototypePlus(
                                    FPT, TotalPrefixArgs + ExtraSuffixArgs)
                              : RequiredArgs::All;

  GlobalDecl GD(D, CtorKind);
  CanQualType ResultType = TheCXXABI.HasThisReturn(GD)
                               ? ArgTypes.front()
                               : TheCXXABI.hasMostDerivedReturn(GD)
                                     ? CGM.getContext().VoidPtrTy
                                     : Context.VoidTy;

  FunctionType::ExtInfo Info = FPT->getExtInfo();
  llvm::SmallVector<FunctionProtoType::ExtParameterInfo, 16> ParamInfos;
  // If the prototype args are elided, we should only have ABI-specific args,
  // which never have param info.
  if (PassProtoArgs && FPT->hasExtParameterInfos()) {
    // ABI-specific suffix arguments are treated the same as variadic arguments.
    addExtParameterInfosForCall(ParamInfos, FPT.getTypePtr(), TotalPrefixArgs,
                                ArgTypes.size());
  }
  return arrangeLLVMFunctionInfo(ResultType, /*instanceMethod=*/true,
                                 /*chainCall=*/false, ArgTypes, Info,
                                 ParamInfos, Required);
}

/// Arrange the argument and result information for the declaration or
/// definition of the given function.
const CGFunctionInfo &
CodeGenTypes::arrangeFunctionDeclaration(const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    if (MD->isInstance())
      return arrangeCXXMethodDeclaration(MD);

  CanQualType FTy = FD->getType()->getCanonicalTypeUnqualified();

  assert(isa<FunctionType>(FTy));
  setCUDAKernelCallingConvention(FTy, CGM, FD);

  // When declaring a function without a prototype, always use a
  // non-variadic type.
  if (CanQual<FunctionNoProtoType> noProto = FTy.getAs<FunctionNoProtoType>()) {
    return arrangeLLVMFunctionInfo(
        noProto->getReturnType(), /*instanceMethod=*/false,
        /*chainCall=*/false, None, noProto->getExtInfo(), {},
        RequiredArgs::All);
  }

  return arrangeFreeFunctionType(FTy.castAs<FunctionProtoType>());
}

/// Arrange the argument and result information for the declaration or
/// definition of an Objective-C method.
const CGFunctionInfo &
CodeGenTypes::arrangeObjCMethodDeclaration(const ObjCMethodDecl *MD) {
  // It happens that this is the same as a call with no optional
  // arguments, except also using the formal 'self' type.
  return arrangeObjCMessageSendSignature(MD, MD->getSelfDecl()->getType());
}

/// Arrange the argument and result information for the function type
/// through which to perform a send to the given Objective-C method,
/// using the given receiver type. The receiver type is not always
/// the 'self' type of the method or even an Objective-C pointer type.
/// This is *not* the right method for actually performing such a
/// message send, due to the possibility of optional arguments.
const CGFunctionInfo &
CodeGenTypes::arrangeObjCMessageSendSignature(const ObjCMethodDecl *MD,
                                              QualType receiverType) {
  SmallVector<CanQualType, 16> argTys;
  SmallVector<FunctionProtoType::ExtParameterInfo, 4> extParamInfos(2);
  argTys.push_back(Context.getCanonicalParamType(receiverType));
  argTys.push_back(Context.getCanonicalParamType(Context.getObjCSelType()));
  // FIXME: Kill copy?
  for (const auto *I : MD->parameters()) {
    argTys.push_back(Context.getCanonicalParamType(I->getType()));
    auto extParamInfo = FunctionProtoType::ExtParameterInfo().withIsNoEscape(
        I->hasAttr<NoEscapeAttr>());
    extParamInfos.push_back(extParamInfo);
  }

  FunctionType::ExtInfo einfo;
  bool IsWindows = getContext().getTargetInfo().getTriple().isOSWindows();
  einfo = einfo.withCallingConv(getCallingConventionForDecl(MD, IsWindows));

  if (getContext().getLangOpts().ObjCAutoRefCount &&
      MD->hasAttr<NSReturnsRetainedAttr>())
    einfo = einfo.withProducesResult(true);

  RequiredArgs required =
      (MD->isVariadic() ? RequiredArgs(argTys.size()) : RequiredArgs::All);

  return arrangeLLVMFunctionInfo(
      GetReturnType(MD->getReturnType()), /*instanceMethod=*/false,
      /*chainCall=*/false, argTys, einfo, extParamInfos, required);
}

const CGFunctionInfo &
CodeGenTypes::arrangeUnprototypedObjCMessageSend(QualType returnType,
                                                 const CallArgList &args) {
  auto argTypes = getArgTypesForCall(Context, args);
  FunctionType::ExtInfo einfo;

  return arrangeLLVMFunctionInfo(
      GetReturnType(returnType), /*instanceMethod=*/false,
      /*chainCall=*/false, argTypes, einfo, {}, RequiredArgs::All);
}

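/// Arrange the argument and result information for the given global
/// declaration, dispatching constructors and destructors to the structor path.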
const CGFunctionInfo &
CodeGenTypes::arrangeGlobalDeclaration(GlobalDecl GD) {
  // FIXME: Do we need to handle ObjCMethodDecl?
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());

  if (isa<CXXConstructorDecl>(GD.getDecl()) ||
      isa<CXXDestructorDecl>(GD.getDecl()))
    return arrangeCXXStructorDeclaration(GD);

  return arrangeFunctionDeclaration(FD);
}

/// Arrange a thunk that takes 'this' as the first parameter followed by
/// varargs. Return a void pointer, regardless of the actual return type.
/// The body of the thunk will end in a musttail call to a function of the
/// correct type, and the caller will bitcast the function to the correct
/// prototype.
const CGFunctionInfo &
CodeGenTypes::arrangeUnprototypedMustTailThunk(const CXXMethodDecl *MD) {
  assert(MD->isVirtual() && "only methods have thunks");
  CanQual<FunctionProtoType> FTP = GetFormalType(MD);
  CanQualType ArgTys[] = {DeriveThisType(MD->getParent(), MD)};
  return arrangeLLVMFunctionInfo(Context.VoidTy, /*instanceMethod=*/false,
                                 /*chainCall=*/false, ArgTys,
                                 FTP->getExtInfo(), {}, RequiredArgs(1));
}

const CGFunctionInfo &
CodeGenTypes::arrangeMSCtorClosure(const CXXConstructorDecl *CD,
                                   CXXCtorType CT) {
  assert(CT == Ctor_CopyingClosure || CT == Ctor_DefaultClosure);

  CanQual<FunctionProtoType> FTP = GetFormalType(CD);
  SmallVector<CanQualType, 2> ArgTys;
  const CXXRecordDecl *RD = CD->getParent();
  ArgTys.push_back(DeriveThisType(RD, CD));
  if (CT == Ctor_CopyingClosure)
    ArgTys.push_back(*FTP->param_type_begin());
  if (RD->getNumVBases() > 0)
    ArgTys.push_back(Context.IntTy);
  CallingConv CC = Context.getDefaultCallingConvention(
      /*IsVariadic=*/false, /*IsCXXMethod=*/true);
  return arrangeLLVMFunctionInfo(Context.VoidTy, /*instanceMethod=*/true,
                                 /*chainCall=*/false, ArgTys,
                                 FunctionType::ExtInfo(CC), {},
                                 RequiredArgs::All);
}

/// Arrange a call as unto a free function, except possibly with an
/// additional number of formal parameters considered required.
static const CGFunctionInfo &
arrangeFreeFunctionLikeCall(CodeGenTypes &CGT,
                            CodeGenModule &CGM,
                            const CallArgList &args,
                            const FunctionType *fnType,
                            unsigned numExtraRequiredArgs,
                            bool chainCall) {
  assert(args.size() >= numExtraRequiredArgs);

  llvm::SmallVector<FunctionProtoType::ExtParameterInfo, 16> paramInfos;

  // In most cases, there are no optional arguments.
  RequiredArgs required = RequiredArgs::All;

  // If we have a variadic prototype, the required arguments are the
  // extra prefix plus the arguments in the prototype.
  if (const FunctionProtoType *proto = dyn_cast<FunctionProtoType>(fnType)) {
    if (proto->isVariadic())
      required = RequiredArgs::forPrototypePlus(proto, numExtraRequiredArgs);

    if (proto->hasExtParameterInfos())
      addExtParameterInfosForCall(paramInfos, proto, numExtraRequiredArgs,
                                  args.size());

  // If we don't have a prototype at all, but we're supposed to
  // explicitly use the variadic convention for unprototyped calls,
  // treat all of the arguments as required but preserve the nominal
  // possibility of variadics.
  } else if (CGM.getTargetCodeGenInfo()
                 .isNoProtoCallVariadic(args,
                                        cast<FunctionNoProtoType>(fnType))) {
    required = RequiredArgs(args.size());
  }

  // FIXME: Kill copy.
  SmallVector<CanQualType, 16> argTypes;
  for (const auto &arg : args)
    argTypes.push_back(CGT.getContext().getCanonicalParamType(arg.Ty));
  return CGT.arrangeLLVMFunctionInfo(GetReturnType(fnType->getReturnType()),
                                     /*instanceMethod=*/false, chainCall,
                                     argTypes, fnType->getExtInfo(), paramInfos,
                                     required);
}

/// Figure out the rules for calling a function with the given formal
/// type using the given arguments. The arguments are necessary
/// because the function might be unprototyped, in which case it's
/// target-dependent in crazy ways.
const CGFunctionInfo &
CodeGenTypes::arrangeFreeFunctionCall(const CallArgList &args,
                                      const FunctionType *fnType,
                                      bool chainCall) {
  return arrangeFreeFunctionLikeCall(*this, CGM, args, fnType,
                                     chainCall ? 1 : 0, chainCall);
}

/// A block function is essentially a free function with an
/// extra implicit argument.
const CGFunctionInfo &
CodeGenTypes::arrangeBlockFunctionCall(const CallArgList &args,
                                       const FunctionType *fnType) {
  return arrangeFreeFunctionLikeCall(*this, CGM, args, fnType, 1,
                                     /*chainCall=*/false);
}

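/// Arrange the declaration of a block: the given params carry the implicit
/// block-literal argument in addition to the parameters of the prototype.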
const CGFunctionInfo &
CodeGenTypes::arrangeBlockFunctionDeclaration(const FunctionProtoType *proto,
                                              const FunctionArgList &params) {
  auto paramInfos = getExtParameterInfosForCall(proto, 1, params.size());
  auto argTypes = getArgTypesForDeclaration(Context, params);

  return arrangeLLVMFunctionInfo(GetReturnType(proto->getReturnType()),
                                 /*instanceMethod*/ false, /*chainCall*/ false,
                                 argTypes, proto->getExtInfo(), paramInfos,
                                 RequiredArgs::forPrototypePlus(proto, 1));
}

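/// Arrange a call to a builtin-style free function with the given result type
/// and arguments; all arguments are treated as required.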
const CGFunctionInfo &
CodeGenTypes::arrangeBuiltinFunctionCall(QualType resultType,
                                         const CallArgList &args) {
  // FIXME: Kill copy.
  SmallVector<CanQualType, 16> argTypes;
  for (const auto &Arg : args)
    argTypes.push_back(Context.getCanonicalParamType(Arg.Ty));
  return arrangeLLVMFunctionInfo(
      GetReturnType(resultType), /*instanceMethod=*/false,
      /*chainCall=*/false, argTypes, FunctionType::ExtInfo(),
      /*paramInfos=*/{}, RequiredArgs::All);
}

const CGFunctionInfo &
CodeGenTypes::arrangeBuiltinFunctionDeclaration(QualType resultType,
                                                const FunctionArgList &args) {
  auto argTypes = getArgTypesForDeclaration(Context, args);

  return arrangeLLVMFunctionInfo(
      GetReturnType(resultType), /*instanceMethod=*/false, /*chainCall=*/false,
      argTypes, FunctionType::ExtInfo(), {}, RequiredArgs::All);
}

const CGFunctionInfo &
CodeGenTypes::arrangeBuiltinFunctionDeclaration(CanQualType resultType,
                                                ArrayRef<CanQualType> argTypes) {
  return arrangeLLVMFunctionInfo(
      resultType, /*instanceMethod=*/false, /*chainCall=*/false,
      argTypes, FunctionType::ExtInfo(), {}, RequiredArgs::All);
}

/// Arrange a call to a C++ method, passing the given arguments.
///
/// numPrefixArgs is the number of ABI-specific prefix arguments we have. It
/// does not count `this`.
const CGFunctionInfo &
CodeGenTypes::arrangeCXXMethodCall(const CallArgList &args,
                                   const FunctionProtoType *proto,
                                   RequiredArgs required,
                                   unsigned numPrefixArgs) {
  assert(numPrefixArgs + 1 <= args.size() &&
         "Emitting a call with less args than the required prefix?");
  // Add one to account for `this`. It's a bit awkward here, but we don't count
  // `this` in similar places elsewhere.
  auto paramInfos =
      getExtParameterInfosForCall(proto, numPrefixArgs + 1, args.size());

  // FIXME: Kill copy.
  auto argTypes = getArgTypesForCall(Context, args);

  FunctionType::ExtInfo info = proto->getExtInfo();
  return arrangeLLVMFunctionInfo(
      GetReturnType(proto->getReturnType()), /*instanceMethod=*/true,
      /*chainCall=*/false, argTypes, info, paramInfos, required);
}

const CGFunctionInfo &CodeGenTypes::arrangeNullaryFunction() {
  return arrangeLLVMFunctionInfo(
      getContext().VoidTy, /*instanceMethod=*/false, /*chainCall=*/false,
      None, FunctionType::ExtInfo(), {}, RequiredArgs::All);
}

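/// Arrange a call that matches an existing signature but may pass additional
/// (optional) arguments beyond those the signature describes.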
const CGFunctionInfo &
CodeGenTypes::arrangeCall(const CGFunctionInfo &signature,
                          const CallArgList &args) {
  assert(signature.arg_size() <= args.size());
  if (signature.arg_size() == args.size())
    return signature;

  SmallVector<FunctionProtoType::ExtParameterInfo, 16> paramInfos;
  auto sigParamInfos = signature.getExtParameterInfos();
  if (!sigParamInfos.empty()) {
    paramInfos.append(sigParamInfos.begin(), sigParamInfos.end());
    paramInfos.resize(args.size());
  }

  auto argTypes = getArgTypesForCall(Context, args);

  assert(signature.getRequiredArgs().allowsOptionalArgs());
  return arrangeLLVMFunctionInfo(signature.getReturnType(),
                                 signature.isInstanceMethod(),
                                 signature.isChainCall(),
                                 argTypes,
                                 signature.getExtInfo(),
                                 paramInfos,
                                 signature.getRequiredArgs());
}

namespace clang {
namespace CodeGen {
void computeSPIRKernelABIInfo(CodeGenModule &CGM, CGFunctionInfo &FI);
}
}

/// Arrange the argument and result information for an abstract value
/// of a given function type. This is the method which all of the
/// above functions ultimately defer to.
const CGFunctionInfo &
CodeGenTypes::arrangeLLVMFunctionInfo(CanQualType resultType,
                                      bool instanceMethod,
                                      bool chainCall,
                                      ArrayRef<CanQualType> argTypes,
                                      FunctionType::ExtInfo info,
                      ArrayRef<FunctionProtoType::ExtParameterInfo> paramInfos,
                                      RequiredArgs required) {
  assert(llvm::all_of(argTypes,
                      [](CanQualType T) { return T.isCanonicalAsParam(); }));

  // Lookup or create unique function info.
  llvm::FoldingSetNodeID ID;
  CGFunctionInfo::Profile(ID, instanceMethod, chainCall, info, paramInfos,
                          required, resultType, argTypes);

  void *insertPos = nullptr;
  CGFunctionInfo *FI = FunctionInfos.FindNodeOrInsertPos(ID, insertPos);
  if (FI)
    return *FI;

  unsigned CC = ClangCallConvToLLVMCallConv(info.getCC());

  // Construct the function info. We co-allocate the ArgInfos.
  FI = CGFunctionInfo::create(CC, instanceMethod, chainCall, info,
                              paramInfos, resultType, argTypes, required);
  FunctionInfos.InsertNode(FI, insertPos);

  bool inserted = FunctionsBeingProcessed.insert(FI).second;
  (void)inserted;
  assert(inserted && "Recursively being processed?");

  // Compute ABI information.
  if (CC == llvm::CallingConv::SPIR_KERNEL) {
    // Force target independent argument handling for the host visible
    // kernel functions.
    computeSPIRKernelABIInfo(CGM, *FI);
  } else if (info.getCC() == CC_Swift) {
    swiftcall::computeABIInfo(CGM, *FI);
  } else {
    getABIInfo().computeInfo(*FI);
  }

  // Loop over all of the computed argument and return value info. If any of
  // them are direct or extend without a specified coerce type, specify the
  // default now.
  ABIArgInfo &retInfo = FI->getReturnInfo();
  if (retInfo.canHaveCoerceToType() && retInfo.getCoerceToType() == nullptr)
    retInfo.setCoerceToType(ConvertType(FI->getReturnType()));

  for (auto &I : FI->arguments())
    if (I.info.canHaveCoerceToType() && I.info.getCoerceToType() == nullptr)
      I.info.setCoerceToType(ConvertType(I.type));

  bool erased = FunctionsBeingProcessed.erase(FI); (void)erased;
  assert(erased && "Not in set?");

  return *FI;
}

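/// Allocate and initialize a CGFunctionInfo, co-allocating the trailing
/// argument and ext-parameter-info arrays.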
CGFunctionInfo *CGFunctionInfo::create(unsigned llvmCC,
                                       bool instanceMethod,
                                       bool chainCall,
                                       const FunctionType::ExtInfo &info,
                                       ArrayRef<ExtParameterInfo> paramInfos,
                                       CanQualType resultType,
                                       ArrayRef<CanQualType> argTypes,
                                       RequiredArgs required) {
  assert(paramInfos.empty() || paramInfos.size() == argTypes.size());
  assert(!required.allowsOptionalArgs() ||
         required.getNumRequiredArgs() <= argTypes.size());

  void *buffer =
      operator new(totalSizeToAlloc<ArgInfo, ExtParameterInfo>(
          argTypes.size() + 1, paramInfos.size()));

  CGFunctionInfo *FI = new(buffer) CGFunctionInfo();
  FI->CallingConvention = llvmCC;
  FI->EffectiveCallingConvention = llvmCC;
  FI->ASTCallingConvention = info.getCC();
  FI->InstanceMethod = instanceMethod;
  FI->ChainCall = chainCall;
  FI->CmseNSCall = info.getCmseNSCall();
  FI->NoReturn = info.getNoReturn();
  FI->ReturnsRetained = info.getProducesResult();
  FI->NoCallerSavedRegs = info.getNoCallerSavedRegs();
  FI->NoCfCheck = info.getNoCfCheck();
  FI->Required = required;
  FI->HasRegParm = info.getHasRegParm();
  FI->RegParm = info.getRegParm();
  FI->ArgStruct = nullptr;
  FI->ArgStructAlign = 0;
  FI->NumArgs = argTypes.size();
  FI->HasExtParameterInfos = !paramInfos.empty();
  FI->getArgsBuffer()[0].type = resultType;
  for (unsigned i = 0, e = argTypes.size(); i != e; ++i)
    FI->getArgsBuffer()[i + 1].type = argTypes[i];
  for (unsigned i = 0, e = paramInfos.size(); i != e; ++i)
    FI->getExtParameterInfosBuffer()[i] = paramInfos[i];
  return FI;
}

/***/

namespace {
// ABIArgInfo::Expand implementation.

// Specifies the way QualType passed as ABIArgInfo::Expand is expanded.
struct TypeExpansion {
  enum TypeExpansionKind {
    // Elements of constant arrays are expanded recursively.
    TEK_ConstantArray,
    // Record fields are expanded recursively (but if record is a union, only
    // the field with the largest size is expanded).
    TEK_Record,
    // For complex types, real and imaginary parts are expanded recursively.
    TEK_Complex,
    // All other types are not expandable.
    TEK_None
  };

  const TypeExpansionKind Kind;

  TypeExpansion(TypeExpansionKind K) : Kind(K) {}
  virtual ~TypeExpansion() {}
};

struct ConstantArrayExpansion : TypeExpansion {
  QualType EltTy;
  uint64_t NumElts;

  ConstantArrayExpansion(QualType EltTy, uint64_t NumElts)
      : TypeExpansion(TEK_ConstantArray), EltTy(EltTy), NumElts(NumElts) {}
  static bool classof(const TypeExpansion *TE) {
    return TE->Kind == TEK_ConstantArray;
  }
};

struct RecordExpansion : TypeExpansion {
  SmallVector<const CXXBaseSpecifier *, 1> Bases;

  SmallVector<const FieldDecl *, 1> Fields;

  RecordExpansion(SmallVector<const CXXBaseSpecifier *, 1> &&Bases,
                  SmallVector<const FieldDecl *, 1> &&Fields)
      : TypeExpansion(TEK_Record), Bases(std::move(Bases)),
        Fields(std::move(Fields)) {}
  static bool classof(const TypeExpansion *TE) {
    return TE->Kind == TEK_Record;
  }
};

struct ComplexExpansion : TypeExpansion {
  QualType EltTy;

  ComplexExpansion(QualType EltTy) : TypeExpansion(TEK_Complex), EltTy(EltTy) {}
  static bool classof(const TypeExpansion *TE) {
    return TE->Kind == TEK_Complex;
  }
};

struct NoExpansion : TypeExpansion {
  NoExpansion() : TypeExpansion(TEK_None) {}
  static bool classof(const TypeExpansion *TE) {
    return TE->Kind == TEK_None;
  }
};
} // namespace

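/// Determine how the given type is expanded: element-wise for constant
/// arrays, base-and-field-wise for records, as real/imaginary parts for
/// complex types, and not at all otherwise.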
static std::unique_ptr<TypeExpansion>
getTypeExpansion(QualType Ty, const ASTContext &Context) {
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(Ty)) {
    return std::make_unique<ConstantArrayExpansion>(
        AT->getElementType(), AT->getSize().getZExtValue());
  }
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    SmallVector<const CXXBaseSpecifier *, 1> Bases;
    SmallVector<const FieldDecl *, 1> Fields;
    const RecordDecl *RD = RT->getDecl();
    assert(!RD->hasFlexibleArrayMember() &&
           "Cannot expand structure with flexible array.");
    if (RD->isUnion()) {
      // Unions can be here only in degenerate cases - all the fields are the
      // same after flattening. Thus we have to use the "largest" field.
      const FieldDecl *LargestFD = nullptr;
      CharUnits UnionSize = CharUnits::Zero();

      for (const auto *FD : RD->fields()) {
        if (FD->isZeroLengthBitField(Context))
          continue;
        assert(!FD->isBitField() &&
               "Cannot expand structure with bit-field members.");
        CharUnits FieldSize = Context.getTypeSizeInChars(FD->getType());
        if (UnionSize < FieldSize) {
          UnionSize = FieldSize;
          LargestFD = FD;
        }
      }
      if (LargestFD)
        Fields.push_back(LargestFD);
    } else {
      if (const auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
        assert(!CXXRD->isDynamicClass() &&
               "cannot expand vtable pointers in dynamic classes");
        for (const CXXBaseSpecifier &BS : CXXRD->bases())
          Bases.push_back(&BS);
      }

      for (const auto *FD : RD->fields()) {
        if (FD->isZeroLengthBitField(Context))
          continue;
        assert(!FD->isBitField() &&
               "Cannot expand structure with bit-field members.");
        Fields.push_back(FD);
      }
    }
    return std::make_unique<RecordExpansion>(std::move(Bases),
                                             std::move(Fields));
  }
  if (const ComplexType *CT = Ty->getAs<ComplexType>()) {
    return std::make_unique<ComplexExpansion>(CT->getElementType());
  }
  return std::make_unique<NoExpansion>();
}

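/// Return the number of expanded pieces (IR-level arguments) that the given
/// type flattens into.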
Alexey Samsonov52c0f6a2014-09-29 20:30:22 +0000961static int getExpansionSize(QualType Ty, const ASTContext &Context) {
962 auto Exp = getTypeExpansion(Ty, Context);
963 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
964 return CAExp->NumElts * getExpansionSize(CAExp->EltTy, Context);
965 }
966 if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
967 int Res = 0;
Reid Klecknere9f6a712014-10-31 17:10:41 +0000968 for (auto BS : RExp->Bases)
969 Res += getExpansionSize(BS->getType(), Context);
Alexey Samsonov52c0f6a2014-09-29 20:30:22 +0000970 for (auto FD : RExp->Fields)
971 Res += getExpansionSize(FD->getType(), Context);
972 return Res;
973 }
974 if (isa<ComplexExpansion>(Exp.get()))
975 return 2;
976 assert(isa<NoExpansion>(Exp.get()));
977 return 1;
978}
979
Alexey Samsonov153004f2014-09-29 22:08:00 +0000980void
981CodeGenTypes::getExpandedTypes(QualType Ty,
982 SmallVectorImpl<llvm::Type *>::iterator &TI) {
983 auto Exp = getTypeExpansion(Ty, Context);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000984 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
985 for (int i = 0, n = CAExp->NumElts; i < n; i++) {
Alexey Samsonov153004f2014-09-29 22:08:00 +0000986 getExpandedTypes(CAExp->EltTy, TI);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000987 }
988 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Reid Klecknere9f6a712014-10-31 17:10:41 +0000989 for (auto BS : RExp->Bases)
990 getExpandedTypes(BS->getType(), TI);
991 for (auto FD : RExp->Fields)
Alexey Samsonov153004f2014-09-29 22:08:00 +0000992 getExpandedTypes(FD->getType(), TI);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000993 } else if (auto CExp = dyn_cast<ComplexExpansion>(Exp.get())) {
994 llvm::Type *EltTy = ConvertType(CExp->EltTy);
Alexey Samsonov153004f2014-09-29 22:08:00 +0000995 *TI++ = EltTy;
996 *TI++ = EltTy;
Alexey Samsonov8a0bad02014-09-29 18:41:28 +0000997 } else {
998 assert(isa<NoExpansion>(Exp.get()));
Alexey Samsonov153004f2014-09-29 22:08:00 +0000999 *TI++ = ConvertType(Ty);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001000 }
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001001}
1002
John McCall7f416cc2015-09-08 08:05:57 +00001003static void forConstantArrayExpansion(CodeGenFunction &CGF,
1004 ConstantArrayExpansion *CAE,
1005 Address BaseAddr,
1006 llvm::function_ref<void(Address)> Fn) {
1007 CharUnits EltSize = CGF.getContext().getTypeSizeInChars(CAE->EltTy);
1008 CharUnits EltAlign =
1009 BaseAddr.getAlignment().alignmentOfArrayElement(EltSize);
1010
1011 for (int i = 0, n = CAE->NumElts; i < n; i++) {
1012 llvm::Value *EltAddr =
1013 CGF.Builder.CreateConstGEP2_32(nullptr, BaseAddr.getPointer(), 0, i);
1014 Fn(Address(EltAddr, EltAlign));
1015 }
1016}
1017
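// ExpandTypeFromArgs and ExpandTypeToArgs below are duals: the first
// reassembles a Clang-level argument from the consecutive scalar IR arguments
// it was expanded into and stores them into an lvalue, while the second
// flattens a call argument into the individual scalar IR call operands.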
Alexey Samsonov91cf4552014-08-22 01:06:06 +00001018void CodeGenFunction::ExpandTypeFromArgs(
John McCall12f23522016-04-04 18:33:08 +00001019 QualType Ty, LValue LV, SmallVectorImpl<llvm::Value *>::iterator &AI) {
Mike Stump11289f42009-09-09 15:08:12 +00001020 assert(LV.isSimple() &&
1021 "Unexpected non-simple lvalue during struct expansion.");
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001022
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001023 auto Exp = getTypeExpansion(Ty, getContext());
1024 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08001025 forConstantArrayExpansion(
1026 *this, CAExp, LV.getAddress(*this), [&](Address EltAddr) {
1027 LValue LV = MakeAddrLValue(EltAddr, CAExp->EltTy);
1028 ExpandTypeFromArgs(CAExp->EltTy, LV, AI);
1029 });
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001030 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08001031 Address This = LV.getAddress(*this);
Reid Klecknere9f6a712014-10-31 17:10:41 +00001032 for (const CXXBaseSpecifier *BS : RExp->Bases) {
1033 // Perform a single step derived-to-base conversion.
John McCall7f416cc2015-09-08 08:05:57 +00001034 Address Base =
Reid Klecknere9f6a712014-10-31 17:10:41 +00001035 GetAddressOfBaseClass(This, Ty->getAsCXXRecordDecl(), &BS, &BS + 1,
1036 /*NullCheckValue=*/false, SourceLocation());
1037 LValue SubLV = MakeAddrLValue(Base, BS->getType());
1038
1039 // Recurse onto bases.
1040 ExpandTypeFromArgs(BS->getType(), SubLV, AI);
1041 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001042 for (auto FD : RExp->Fields) {
1043 // FIXME: What are the right qualifiers here?
Reid Kleckner9d031092016-05-02 22:42:34 +00001044 LValue SubLV = EmitLValueForFieldInitialization(LV, FD);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001045 ExpandTypeFromArgs(FD->getType(), SubLV, AI);
Bob Wilsone826a2a2011-08-03 05:58:22 +00001046 }
John McCall7f416cc2015-09-08 08:05:57 +00001047 } else if (isa<ComplexExpansion>(Exp.get())) {
1048 auto realValue = *AI++;
1049 auto imagValue = *AI++;
1050 EmitStoreOfComplex(ComplexPairTy(realValue, imagValue), LV, /*init*/ true);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001051 } else {
Akira Hatanakad8136f12019-11-29 09:56:02 -08001052 // Call EmitStoreOfScalar to emit a primitive store, except when the lvalue
1053 // is a bitfield, which requires a store through the lvalue machinery.
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001054 assert(isa<NoExpansion>(Exp.get()));
Akira Hatanakad8136f12019-11-29 09:56:02 -08001055 if (LV.isBitField())
1056 EmitStoreThroughLValue(RValue::get(*AI++), LV);
1057 else
1058 EmitStoreOfScalar(*AI++, LV);
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001059 }
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001060}
1061
1062void CodeGenFunction::ExpandTypeToArgs(
Yaxun Liu5b330e82018-03-15 15:25:19 +00001063 QualType Ty, CallArg Arg, llvm::FunctionType *IRFuncTy,
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001064 SmallVectorImpl<llvm::Value *> &IRCallArgs, unsigned &IRCallArgPos) {
1065 auto Exp = getTypeExpansion(Ty, getContext());
1066 if (auto CAExp = dyn_cast<ConstantArrayExpansion>(Exp.get())) {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08001067 Address Addr = Arg.hasLValue() ? Arg.getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00001068 : Arg.getKnownRValue().getAggregateAddress();
1069 forConstantArrayExpansion(
1070 *this, CAExp, Addr, [&](Address EltAddr) {
1071 CallArg EltArg = CallArg(
1072 convertTempToRValue(EltAddr, CAExp->EltTy, SourceLocation()),
1073 CAExp->EltTy);
1074 ExpandTypeToArgs(CAExp->EltTy, EltArg, IRFuncTy, IRCallArgs,
1075 IRCallArgPos);
1076 });
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001077 } else if (auto RExp = dyn_cast<RecordExpansion>(Exp.get())) {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08001078 Address This = Arg.hasLValue() ? Arg.getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00001079 : Arg.getKnownRValue().getAggregateAddress();
Reid Klecknere9f6a712014-10-31 17:10:41 +00001080 for (const CXXBaseSpecifier *BS : RExp->Bases) {
1081 // Perform a single step derived-to-base conversion.
John McCall7f416cc2015-09-08 08:05:57 +00001082 Address Base =
Reid Klecknere9f6a712014-10-31 17:10:41 +00001083 GetAddressOfBaseClass(This, Ty->getAsCXXRecordDecl(), &BS, &BS + 1,
1084 /*NullCheckValue=*/false, SourceLocation());
Yaxun Liu5b330e82018-03-15 15:25:19 +00001085 CallArg BaseArg = CallArg(RValue::getAggregate(Base), BS->getType());
Reid Klecknere9f6a712014-10-31 17:10:41 +00001086
1087 // Recurse onto bases.
Yaxun Liu5b330e82018-03-15 15:25:19 +00001088 ExpandTypeToArgs(BS->getType(), BaseArg, IRFuncTy, IRCallArgs,
Reid Klecknere9f6a712014-10-31 17:10:41 +00001089 IRCallArgPos);
1090 }
1091
1092 LValue LV = MakeAddrLValue(This, Ty);
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001093 for (auto FD : RExp->Fields) {
Yaxun Liu5b330e82018-03-15 15:25:19 +00001094 CallArg FldArg =
1095 CallArg(EmitRValueForField(LV, FD, SourceLocation()), FD->getType());
1096 ExpandTypeToArgs(FD->getType(), FldArg, IRFuncTy, IRCallArgs,
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001097 IRCallArgPos);
1098 }
1099 } else if (isa<ComplexExpansion>(Exp.get())) {
Yaxun Liu5b330e82018-03-15 15:25:19 +00001100 ComplexPairTy CV = Arg.getKnownRValue().getComplexVal();
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001101 IRCallArgs[IRCallArgPos++] = CV.first;
1102 IRCallArgs[IRCallArgPos++] = CV.second;
1103 } else {
1104 assert(isa<NoExpansion>(Exp.get()));
Yaxun Liu5b330e82018-03-15 15:25:19 +00001105 auto RV = Arg.getKnownRValue();
Alexey Samsonov8a0bad02014-09-29 18:41:28 +00001106 assert(RV.isScalar() &&
1107 "Unexpected non-scalar rvalue during struct expansion.");
1108
1109 // Insert a bitcast as needed.
1110 llvm::Value *V = RV.getScalarVal();
1111 if (IRCallArgPos < IRFuncTy->getNumParams() &&
1112 V->getType() != IRFuncTy->getParamType(IRCallArgPos))
1113 V = Builder.CreateBitCast(V, IRFuncTy->getParamType(IRCallArgPos));
1114
1115 IRCallArgs[IRCallArgPos++] = V;
1116 }
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001117}
1118
John McCall7f416cc2015-09-08 08:05:57 +00001119/// Create a temporary allocation for the purposes of coercion.
1120static Address CreateTempAllocaForCoercion(CodeGenFunction &CGF, llvm::Type *Ty,
1121 CharUnits MinAlign) {
1122 // Don't use an alignment that's worse than what LLVM would prefer.
1123 auto PrefAlign = CGF.CGM.getDataLayout().getPrefTypeAlignment(Ty);
1124 CharUnits Align = std::max(MinAlign, CharUnits::fromQuantity(PrefAlign));
1125
1126 return CGF.CreateTempAlloca(Ty, Align);
1127}
1128
Chris Lattner895c52b2010-06-27 06:04:18 +00001129/// EnterStructPointerForCoercedAccess - Given a struct pointer that we are
Chris Lattner1cd66982010-06-27 05:56:15 +00001130/// accessing some number of bytes out of, try to gep into the struct to get
1131/// at its inner goodness. Dive as deep as possible without entering an element
1132/// with an in-memory size smaller than DstSize.
John McCall7f416cc2015-09-08 08:05:57 +00001133static Address
1134EnterStructPointerForCoercedAccess(Address SrcPtr,
Chris Lattner2192fe52011-07-18 04:24:23 +00001135 llvm::StructType *SrcSTy,
Chris Lattner895c52b2010-06-27 06:04:18 +00001136 uint64_t DstSize, CodeGenFunction &CGF) {
Chris Lattner1cd66982010-06-27 05:56:15 +00001137 // We can't dive into a zero-element struct.
1138 if (SrcSTy->getNumElements() == 0) return SrcPtr;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001139
Chris Lattner2192fe52011-07-18 04:24:23 +00001140 llvm::Type *FirstElt = SrcSTy->getElementType(0);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001141
Chris Lattner1cd66982010-06-27 05:56:15 +00001142 // If the first elt is at least as large as what we're looking for, or if the
James Molloy90d61012014-08-29 10:17:52 +00001143 // first element is the same size as the whole struct, we can enter it. The
1144 // comparison must be made on the store size and not the alloca size. Using
1145 // the alloca size may overstate the size of the load.
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001146 uint64_t FirstEltSize =
James Molloy90d61012014-08-29 10:17:52 +00001147 CGF.CGM.getDataLayout().getTypeStoreSize(FirstElt);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001148 if (FirstEltSize < DstSize &&
James Molloy90d61012014-08-29 10:17:52 +00001149 FirstEltSize < CGF.CGM.getDataLayout().getTypeStoreSize(SrcSTy))
Chris Lattner1cd66982010-06-27 05:56:15 +00001150 return SrcPtr;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001151
Chris Lattner1cd66982010-06-27 05:56:15 +00001152 // GEP into the first element.
James Y Knight751fe282019-02-09 22:22:28 +00001153 SrcPtr = CGF.Builder.CreateStructGEP(SrcPtr, 0, "coerce.dive");
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001154
Chris Lattner1cd66982010-06-27 05:56:15 +00001155 // If the first element is a struct, recurse.
John McCall7f416cc2015-09-08 08:05:57 +00001156 llvm::Type *SrcTy = SrcPtr.getElementType();
Chris Lattner2192fe52011-07-18 04:24:23 +00001157 if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy))
Chris Lattner895c52b2010-06-27 06:04:18 +00001158 return EnterStructPointerForCoercedAccess(SrcPtr, SrcSTy, DstSize, CGF);
Chris Lattner1cd66982010-06-27 05:56:15 +00001159
1160 return SrcPtr;
1161}
1162
Chris Lattner055097f2010-06-27 06:26:04 +00001163/// CoerceIntOrPtrToIntOrPtr - Convert a value Val to the specified Ty, where
1164/// both are either integers or pointers. This does a truncation of the value
1165/// if it is too large or a zero extension if it is too small.
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +00001166///
1167/// This behaves as if the value were coerced through memory, so on big-endian
1168/// targets the high bits are preserved in a truncation, while little-endian
1169/// targets preserve the low bits.
Chris Lattner055097f2010-06-27 06:26:04 +00001170static llvm::Value *CoerceIntOrPtrToIntOrPtr(llvm::Value *Val,
Chris Lattner2192fe52011-07-18 04:24:23 +00001171 llvm::Type *Ty,
Chris Lattner055097f2010-06-27 06:26:04 +00001172 CodeGenFunction &CGF) {
1173 if (Val->getType() == Ty)
1174 return Val;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001175
Chris Lattner055097f2010-06-27 06:26:04 +00001176 if (isa<llvm::PointerType>(Val->getType())) {
1177 // If this is Pointer->Pointer avoid conversion to and from int.
1178 if (isa<llvm::PointerType>(Ty))
1179 return CGF.Builder.CreateBitCast(Val, Ty, "coerce.val");
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001180
Chris Lattner055097f2010-06-27 06:26:04 +00001181 // Convert the pointer to an integer so we can play with its width.
Chris Lattner5e016ae2010-06-27 07:15:29 +00001182 Val = CGF.Builder.CreatePtrToInt(Val, CGF.IntPtrTy, "coerce.val.pi");
Chris Lattner055097f2010-06-27 06:26:04 +00001183 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001184
Chris Lattner2192fe52011-07-18 04:24:23 +00001185 llvm::Type *DestIntTy = Ty;
Chris Lattner055097f2010-06-27 06:26:04 +00001186 if (isa<llvm::PointerType>(DestIntTy))
Chris Lattner5e016ae2010-06-27 07:15:29 +00001187 DestIntTy = CGF.IntPtrTy;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001188
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +00001189 if (Val->getType() != DestIntTy) {
1190 const llvm::DataLayout &DL = CGF.CGM.getDataLayout();
1191 if (DL.isBigEndian()) {
1192 // Preserve the high bits on big-endian targets.
1193 // That is what memory coercion does.
James Molloy491cefb2014-05-07 17:41:15 +00001194 uint64_t SrcSize = DL.getTypeSizeInBits(Val->getType());
1195 uint64_t DstSize = DL.getTypeSizeInBits(DestIntTy);
1196
Jakob Stoklund Olesen36af2522013-06-05 03:00:13 +00001197 if (SrcSize > DstSize) {
1198 Val = CGF.Builder.CreateLShr(Val, SrcSize - DstSize, "coerce.highbits");
1199 Val = CGF.Builder.CreateTrunc(Val, DestIntTy, "coerce.val.ii");
1200 } else {
1201 Val = CGF.Builder.CreateZExt(Val, DestIntTy, "coerce.val.ii");
1202 Val = CGF.Builder.CreateShl(Val, DstSize - SrcSize, "coerce.highbits");
1203 }
1204 } else {
1205 // Little-endian targets preserve the low bits. No shifts required.
1206 Val = CGF.Builder.CreateIntCast(Val, DestIntTy, false, "coerce.val.ii");
1207 }
1208 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001209
Chris Lattner055097f2010-06-27 06:26:04 +00001210 if (isa<llvm::PointerType>(Ty))
1211 Val = CGF.Builder.CreateIntToPtr(Val, Ty, "coerce.val.ip");
1212 return Val;
1213}
1214
Chris Lattner1cd66982010-06-27 05:56:15 +00001215
1216
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001217/// CreateCoercedLoad - Create a load from \arg Src interpreted as
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001218/// a pointer to an object of type \arg Ty, using the alignment carried by
1219/// the \arg Src address.
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001220///
1221/// This safely handles the case when the src type is smaller than the
1222/// destination type; in this situation the values of bits which are not
1223/// present in the src are undefined.
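///
/// In outline: if the types already match, emit a plain load; if both are
/// integer or pointer types, load and run CoerceIntOrPtrToIntOrPtr; if the
/// source is at least as large as the destination, bitcast the source pointer
/// and load directly; otherwise copy through a temporary alloca of the
/// destination type and load from that.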
John McCall7f416cc2015-09-08 08:05:57 +00001224static llvm::Value *CreateCoercedLoad(Address Src, llvm::Type *Ty,
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001225 CodeGenFunction &CGF) {
John McCall7f416cc2015-09-08 08:05:57 +00001226 llvm::Type *SrcTy = Src.getElementType();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001227
Chris Lattnerd200eda2010-06-28 22:51:39 +00001228 // If SrcTy and Ty are the same, just do a load.
1229 if (SrcTy == Ty)
John McCall7f416cc2015-09-08 08:05:57 +00001230 return CGF.Builder.CreateLoad(Src);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001231
Micah Villmowdd31ca12012-10-08 16:25:52 +00001232 uint64_t DstSize = CGF.CGM.getDataLayout().getTypeAllocSize(Ty);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001233
Chris Lattner2192fe52011-07-18 04:24:23 +00001234 if (llvm::StructType *SrcSTy = dyn_cast<llvm::StructType>(SrcTy)) {
John McCall7f416cc2015-09-08 08:05:57 +00001235 Src = EnterStructPointerForCoercedAccess(Src, SrcSTy, DstSize, CGF);
Eli Friedman83fa8112020-04-03 15:11:40 -07001236 SrcTy = Src.getElementType();
Chris Lattner1cd66982010-06-27 05:56:15 +00001237 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001238
Micah Villmowdd31ca12012-10-08 16:25:52 +00001239 uint64_t SrcSize = CGF.CGM.getDataLayout().getTypeAllocSize(SrcTy);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001240
Chris Lattner055097f2010-06-27 06:26:04 +00001241 // If the source and destination are integer or pointer types, just do an
1242 // extension or truncation to the desired type.
1243 if ((isa<llvm::IntegerType>(Ty) || isa<llvm::PointerType>(Ty)) &&
1244 (isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy))) {
John McCall7f416cc2015-09-08 08:05:57 +00001245 llvm::Value *Load = CGF.Builder.CreateLoad(Src);
Chris Lattner055097f2010-06-27 06:26:04 +00001246 return CoerceIntOrPtrToIntOrPtr(Load, Ty, CGF);
1247 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001248
Daniel Dunbarb52d0772009-02-03 05:59:18 +00001249 // If load is legal, just bitcast the src pointer.
Daniel Dunbarffdb8432009-05-13 18:54:26 +00001250 if (SrcSize >= DstSize) {
Mike Stump18bb9282009-05-16 07:57:57 +00001251 // Generally SrcSize is never greater than DstSize, since this means we are
1252 // losing bits. However, this can happen in cases where the structure has
1253 // additional padding, for example due to a user specified alignment.
Daniel Dunbarffdb8432009-05-13 18:54:26 +00001254 //
Mike Stump18bb9282009-05-16 07:57:57 +00001255 // FIXME: Assert that we aren't truncating non-padding bits when we have
1256 // access to that information.
Matt Arsenault7a124f32017-08-01 20:36:57 +00001257 Src = CGF.Builder.CreateBitCast(Src,
1258 Ty->getPointerTo(Src.getAddressSpace()));
John McCall7f416cc2015-09-08 08:05:57 +00001259 return CGF.Builder.CreateLoad(Src);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001260 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001261
John McCall7f416cc2015-09-08 08:05:57 +00001262 // Otherwise do coercion through memory. This is stupid, but simple.
1263 Address Tmp = CreateTempAllocaForCoercion(CGF, Ty, Src.getAlignment());
Yaxun Liu4bbdebc2018-11-08 16:55:46 +00001264 Address Casted = CGF.Builder.CreateElementBitCast(Tmp, CGF.Int8Ty);
1265 Address SrcCasted = CGF.Builder.CreateElementBitCast(Src, CGF.Int8Ty);
Manman Ren84b921f2012-11-28 22:08:52 +00001266 CGF.Builder.CreateMemCpy(Casted, SrcCasted,
1267 llvm::ConstantInt::get(CGF.IntPtrTy, SrcSize),
John McCall7f416cc2015-09-08 08:05:57 +00001268 false);
1269 return CGF.Builder.CreateLoad(Tmp);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001270}
1271
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001272// Function to store a first-class aggregate into memory. We prefer to
1273// store the elements rather than the aggregate to be more friendly to
1274// fast-isel.
1275// FIXME: Do we need to recurse here?
1276static void BuildAggStore(CodeGenFunction &CGF, llvm::Value *Val,
John McCall7f416cc2015-09-08 08:05:57 +00001277 Address Dest, bool DestIsVolatile) {
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001278 // Prefer scalar stores to first-class aggregate stores.
Chris Lattner2192fe52011-07-18 04:24:23 +00001279 if (llvm::StructType *STy =
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001280 dyn_cast<llvm::StructType>(Val->getType())) {
1281 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
James Y Knight751fe282019-02-09 22:22:28 +00001282 Address EltPtr = CGF.Builder.CreateStructGEP(Dest, i);
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001283 llvm::Value *Elt = CGF.Builder.CreateExtractValue(Val, i);
John McCall7f416cc2015-09-08 08:05:57 +00001284 CGF.Builder.CreateStore(Elt, EltPtr, DestIsVolatile);
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001285 }
1286 } else {
John McCall7f416cc2015-09-08 08:05:57 +00001287 CGF.Builder.CreateStore(Val, Dest, DestIsVolatile);
Eli Friedmanaf9b3252011-05-17 21:08:01 +00001288 }
1289}
1290
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001291/// CreateCoercedStore - Create a store to \arg Dst from \arg Src,
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00001292/// where the source and destination may have different types. The
1293/// destination alignment is taken from the \arg Dst address.
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001294///
1295/// This safely handles the case when the src type is larger than the
1296/// destination type; the upper bits of the src will be lost.
1297static void CreateCoercedStore(llvm::Value *Src,
John McCall7f416cc2015-09-08 08:05:57 +00001298 Address Dst,
Anders Carlsson17490832009-12-24 20:40:36 +00001299 bool DstIsVolatile,
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001300 CodeGenFunction &CGF) {
Chris Lattner2192fe52011-07-18 04:24:23 +00001301 llvm::Type *SrcTy = Src->getType();
Eli Friedman83fa8112020-04-03 15:11:40 -07001302 llvm::Type *DstTy = Dst.getElementType();
Chris Lattnerd200eda2010-06-28 22:51:39 +00001303 if (SrcTy == DstTy) {
John McCall7f416cc2015-09-08 08:05:57 +00001304 CGF.Builder.CreateStore(Src, Dst, DstIsVolatile);
Chris Lattnerd200eda2010-06-28 22:51:39 +00001305 return;
1306 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001307
Micah Villmowdd31ca12012-10-08 16:25:52 +00001308 uint64_t SrcSize = CGF.CGM.getDataLayout().getTypeAllocSize(SrcTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001309
Chris Lattner2192fe52011-07-18 04:24:23 +00001310 if (llvm::StructType *DstSTy = dyn_cast<llvm::StructType>(DstTy)) {
John McCall7f416cc2015-09-08 08:05:57 +00001311 Dst = EnterStructPointerForCoercedAccess(Dst, DstSTy, SrcSize, CGF);
Eli Friedman83fa8112020-04-03 15:11:40 -07001312 DstTy = Dst.getElementType();
Chris Lattner895c52b2010-06-27 06:04:18 +00001313 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001314
Michael Liao15140e42019-11-04 11:41:07 -05001315 llvm::PointerType *SrcPtrTy = llvm::dyn_cast<llvm::PointerType>(SrcTy);
1316 llvm::PointerType *DstPtrTy = llvm::dyn_cast<llvm::PointerType>(DstTy);
1317 if (SrcPtrTy && DstPtrTy &&
1318 SrcPtrTy->getAddressSpace() != DstPtrTy->getAddressSpace()) {
1319 Src = CGF.Builder.CreatePointerBitCastOrAddrSpaceCast(Src, DstTy);
1320 CGF.Builder.CreateStore(Src, Dst, DstIsVolatile);
1321 return;
1322 }
1323
Chris Lattner055097f2010-06-27 06:26:04 +00001324 // If the source and destination are integer or pointer types, just do an
1325 // extension or truncation to the desired type.
1326 if ((isa<llvm::IntegerType>(SrcTy) || isa<llvm::PointerType>(SrcTy)) &&
1327 (isa<llvm::IntegerType>(DstTy) || isa<llvm::PointerType>(DstTy))) {
1328 Src = CoerceIntOrPtrToIntOrPtr(Src, DstTy, CGF);
John McCall7f416cc2015-09-08 08:05:57 +00001329 CGF.Builder.CreateStore(Src, Dst, DstIsVolatile);
Chris Lattner055097f2010-06-27 06:26:04 +00001330 return;
1331 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001332
Micah Villmowdd31ca12012-10-08 16:25:52 +00001333 uint64_t DstSize = CGF.CGM.getDataLayout().getTypeAllocSize(DstTy);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001334
Daniel Dunbar313321e2009-02-03 05:31:23 +00001335 // If store is legal, just bitcast the src pointer.
Daniel Dunbar4be99ff2009-06-05 07:58:54 +00001336 if (SrcSize <= DstSize) {
Yaxun Liue9e5c4f2017-06-29 18:47:45 +00001337 Dst = CGF.Builder.CreateElementBitCast(Dst, SrcTy);
John McCall7f416cc2015-09-08 08:05:57 +00001338 BuildAggStore(CGF, Src, Dst, DstIsVolatile);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001339 } else {
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001340 // Otherwise do coercion through memory. This is stupid, but
1341 // simple.
Daniel Dunbar4be99ff2009-06-05 07:58:54 +00001342
1343 // Generally SrcSize is never greater than DstSize, since this means we are
1344 // losing bits. However, this can happen in cases where the structure has
1345 // additional padding, for example due to a user specified alignment.
1346 //
1347 // FIXME: Assert that we aren't truncating non-padding bits when we have
1348 // access to that information.
John McCall7f416cc2015-09-08 08:05:57 +00001349 Address Tmp = CreateTempAllocaForCoercion(CGF, SrcTy, Dst.getAlignment());
1350 CGF.Builder.CreateStore(Src, Tmp);
Yaxun Liu4bbdebc2018-11-08 16:55:46 +00001351 Address Casted = CGF.Builder.CreateElementBitCast(Tmp, CGF.Int8Ty);
1352 Address DstCasted = CGF.Builder.CreateElementBitCast(Dst, CGF.Int8Ty);
Manman Ren84b921f2012-11-28 22:08:52 +00001353 CGF.Builder.CreateMemCpy(DstCasted, Casted,
1354 llvm::ConstantInt::get(CGF.IntPtrTy, DstSize),
John McCall7f416cc2015-09-08 08:05:57 +00001355 false);
Daniel Dunbarf5589ac2009-02-02 19:06:38 +00001356 }
1357}
1358
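/// Apply the ABIArgInfo's direct offset, if any: advance the address by that
/// many bytes and element-bitcast the result to the coerced type; with a zero
/// offset the address is returned unchanged.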
John McCall7f416cc2015-09-08 08:05:57 +00001359static Address emitAddressAtOffset(CodeGenFunction &CGF, Address addr,
Fangrui Song6907ce22018-07-30 19:24:48 +00001360 const ABIArgInfo &info) {
John McCall7f416cc2015-09-08 08:05:57 +00001361 if (unsigned offset = info.getDirectOffset()) {
1362 addr = CGF.Builder.CreateElementBitCast(addr, CGF.Int8Ty);
1363 addr = CGF.Builder.CreateConstInBoundsByteGEP(addr,
1364 CharUnits::fromQuantity(offset));
1365 addr = CGF.Builder.CreateElementBitCast(addr, info.getCoerceToType());
1366 }
1367 return addr;
1368}
1369
Alexey Samsonov153004f2014-09-29 22:08:00 +00001370namespace {
1371
1372/// Encapsulates information about the way function arguments from
1373/// CGFunctionInfo should be passed to the actual LLVM IR function.
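///
/// As an illustration (hypothetical signature, not taken from this file): a
/// call returning a large struct indirectly and passing one expanded argument
/// might lower to IR arguments [sret pointer, field0, field1, ...]; this
/// mapping records, per Clang argument, the index of its first IR argument
/// and how many IR arguments it occupies, plus any padding, sret, and
/// inalloca slots.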
1374class ClangToLLVMArgMapping {
1375 static const unsigned InvalidIndex = ~0U;
1376 unsigned InallocaArgNo;
1377 unsigned SRetArgNo;
1378 unsigned TotalIRArgs;
1379
1380 /// Arguments of the LLVM IR function corresponding to a single Clang argument.
1381 struct IRArgs {
1382 unsigned PaddingArgIndex;
1383 // Argument is expanded to IR arguments at positions
1384 // [FirstArgIndex, FirstArgIndex + NumberOfArgs).
1385 unsigned FirstArgIndex;
1386 unsigned NumberOfArgs;
1387
1388 IRArgs()
1389 : PaddingArgIndex(InvalidIndex), FirstArgIndex(InvalidIndex),
1390 NumberOfArgs(0) {}
1391 };
1392
1393 SmallVector<IRArgs, 8> ArgInfo;
1394
1395public:
1396 ClangToLLVMArgMapping(const ASTContext &Context, const CGFunctionInfo &FI,
1397 bool OnlyRequiredArgs = false)
1398 : InallocaArgNo(InvalidIndex), SRetArgNo(InvalidIndex), TotalIRArgs(0),
1399 ArgInfo(OnlyRequiredArgs ? FI.getNumRequiredArgs() : FI.arg_size()) {
1400 construct(Context, FI, OnlyRequiredArgs);
1401 }
1402
1403 bool hasInallocaArg() const { return InallocaArgNo != InvalidIndex; }
1404 unsigned getInallocaArgNo() const {
1405 assert(hasInallocaArg());
1406 return InallocaArgNo;
1407 }
1408
1409 bool hasSRetArg() const { return SRetArgNo != InvalidIndex; }
1410 unsigned getSRetArgNo() const {
1411 assert(hasSRetArg());
1412 return SRetArgNo;
1413 }
1414
1415 unsigned totalIRArgs() const { return TotalIRArgs; }
1416
1417 bool hasPaddingArg(unsigned ArgNo) const {
1418 assert(ArgNo < ArgInfo.size());
1419 return ArgInfo[ArgNo].PaddingArgIndex != InvalidIndex;
1420 }
1421 unsigned getPaddingArgNo(unsigned ArgNo) const {
1422 assert(hasPaddingArg(ArgNo));
1423 return ArgInfo[ArgNo].PaddingArgIndex;
1424 }
1425
1426 /// Returns the index of the first IR argument corresponding to ArgNo, and
1427 /// the number of IR arguments it occupies.
1428 std::pair<unsigned, unsigned> getIRArgs(unsigned ArgNo) const {
1429 assert(ArgNo < ArgInfo.size());
1430 return std::make_pair(ArgInfo[ArgNo].FirstArgIndex,
1431 ArgInfo[ArgNo].NumberOfArgs);
1432 }
1433
1434private:
1435 void construct(const ASTContext &Context, const CGFunctionInfo &FI,
1436 bool OnlyRequiredArgs);
1437};
1438
1439void ClangToLLVMArgMapping::construct(const ASTContext &Context,
1440 const CGFunctionInfo &FI,
1441 bool OnlyRequiredArgs) {
1442 unsigned IRArgNo = 0;
1443 bool SwapThisWithSRet = false;
1444 const ABIArgInfo &RetAI = FI.getReturnInfo();
1445
1446 if (RetAI.getKind() == ABIArgInfo::Indirect) {
1447 SwapThisWithSRet = RetAI.isSRetAfterThis();
1448 SRetArgNo = SwapThisWithSRet ? 1 : IRArgNo++;
1449 }
1450
1451 unsigned ArgNo = 0;
1452 unsigned NumArgs = OnlyRequiredArgs ? FI.getNumRequiredArgs() : FI.arg_size();
1453 for (CGFunctionInfo::const_arg_iterator I = FI.arg_begin(); ArgNo < NumArgs;
1454 ++I, ++ArgNo) {
1455 assert(I != FI.arg_end());
1456 QualType ArgType = I->type;
1457 const ABIArgInfo &AI = I->info;
1458 // Collect data about IR arguments corresponding to Clang argument ArgNo.
1459 auto &IRArgs = ArgInfo[ArgNo];
1460
1461 if (AI.getPaddingType())
1462 IRArgs.PaddingArgIndex = IRArgNo++;
1463
1464 switch (AI.getKind()) {
1465 case ABIArgInfo::Extend:
1466 case ABIArgInfo::Direct: {
1467 // FIXME: handle sseregparm someday...
1468 llvm::StructType *STy = dyn_cast<llvm::StructType>(AI.getCoerceToType());
1469 if (AI.isDirect() && AI.getCanBeFlattened() && STy) {
1470 IRArgs.NumberOfArgs = STy->getNumElements();
1471 } else {
1472 IRArgs.NumberOfArgs = 1;
1473 }
1474 break;
1475 }
1476 case ABIArgInfo::Indirect:
1477 IRArgs.NumberOfArgs = 1;
1478 break;
1479 case ABIArgInfo::Ignore:
1480 case ABIArgInfo::InAlloca:
1481 // Ignore and InAlloca don't have matching LLVM parameters.
1482 IRArgs.NumberOfArgs = 0;
1483 break;
John McCallf26e73d2016-03-11 04:30:43 +00001484 case ABIArgInfo::CoerceAndExpand:
1485 IRArgs.NumberOfArgs = AI.getCoerceAndExpandTypeSequence().size();
1486 break;
1487 case ABIArgInfo::Expand:
Alexey Samsonov153004f2014-09-29 22:08:00 +00001488 IRArgs.NumberOfArgs = getExpansionSize(ArgType, Context);
1489 break;
1490 }
Alexey Samsonov153004f2014-09-29 22:08:00 +00001491
1492 if (IRArgs.NumberOfArgs > 0) {
1493 IRArgs.FirstArgIndex = IRArgNo;
1494 IRArgNo += IRArgs.NumberOfArgs;
1495 }
1496
1497 // Skip over the sret parameter when it comes second. We already handled it
1498 // above.
1499 if (IRArgNo == 1 && SwapThisWithSRet)
1500 IRArgNo++;
1501 }
1502 assert(ArgNo == ArgInfo.size());
1503
1504 if (FI.usesInAlloca())
1505 InallocaArgNo = IRArgNo++;
1506
1507 TotalIRArgs = IRArgNo;
1508}
1509} // namespace
1510
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00001511/***/
1512
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001513bool CodeGenModule::ReturnTypeUsesSRet(const CGFunctionInfo &FI) {
Saleem Abdulrasoolf181f1a2018-02-28 20:16:12 +00001514 const auto &RI = FI.getReturnInfo();
1515 return RI.isIndirect() || (RI.isInAlloca() && RI.getInAllocaSRet());
Daniel Dunbar7633cbf2009-02-02 21:43:58 +00001516}
1517
Tim Northovere77cc392014-03-29 13:28:05 +00001518bool CodeGenModule::ReturnSlotInterferesWithArgs(const CGFunctionInfo &FI) {
1519 return ReturnTypeUsesSRet(FI) &&
1520 getTargetCodeGenInfo().doesReturnSlotInterfereWithArgs();
1521}
1522
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001523bool CodeGenModule::ReturnTypeUsesFPRet(QualType ResultType) {
1524 if (const BuiltinType *BT = ResultType->getAs<BuiltinType>()) {
1525 switch (BT->getKind()) {
1526 default:
1527 return false;
1528 case BuiltinType::Float:
John McCallc8e01702013-04-16 22:48:15 +00001529 return getTarget().useObjCFPRetForRealType(TargetInfo::Float);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001530 case BuiltinType::Double:
John McCallc8e01702013-04-16 22:48:15 +00001531 return getTarget().useObjCFPRetForRealType(TargetInfo::Double);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001532 case BuiltinType::LongDouble:
John McCallc8e01702013-04-16 22:48:15 +00001533 return getTarget().useObjCFPRetForRealType(TargetInfo::LongDouble);
Daniel Dunbar6f2e8392010-07-14 23:39:36 +00001534 }
1535 }
1536
1537 return false;
1538}
1539
Anders Carlsson2f1a6c32011-10-31 16:27:11 +00001540bool CodeGenModule::ReturnTypeUsesFP2Ret(QualType ResultType) {
1541 if (const ComplexType *CT = ResultType->getAs<ComplexType>()) {
1542 if (const BuiltinType *BT = CT->getElementType()->getAs<BuiltinType>()) {
1543 if (BT->getKind() == BuiltinType::LongDouble)
John McCallc8e01702013-04-16 22:48:15 +00001544 return getTarget().useObjCFP2RetForComplexLongDouble();
Anders Carlsson2f1a6c32011-10-31 16:27:11 +00001545 }
1546 }
1547
1548 return false;
1549}
1550
Chris Lattnera5f58b02011-07-09 17:41:47 +00001551llvm::FunctionType *CodeGenTypes::GetFunctionType(GlobalDecl GD) {
John McCalla729c622012-02-17 03:33:10 +00001552 const CGFunctionInfo &FI = arrangeGlobalDeclaration(GD);
1553 return GetFunctionType(FI);
John McCallf8ff7b92010-02-23 00:48:20 +00001554}
1555
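// A brief map of the return lowering below: Indirect and Ignore returns
// produce an IR void result (Indirect additionally gets an sret pointer
// parameter via ClangToLLVMArgMapping), Direct/Extend returns use the coerced
// type, and an InAlloca return yields the sret pointer only when
// getInAllocaSRet() is set.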
Chris Lattnera5f58b02011-07-09 17:41:47 +00001556llvm::FunctionType *
John McCalla729c622012-02-17 03:33:10 +00001557CodeGenTypes::GetFunctionType(const CGFunctionInfo &FI) {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001558
David Blaikie82e95a32014-11-19 07:49:47 +00001559 bool Inserted = FunctionsBeingProcessed.insert(&FI).second;
1560 (void)Inserted;
Chris Lattner6fb0ccf2011-07-15 05:16:14 +00001561 assert(Inserted && "Recursively being processed?");
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001562
Alexey Samsonov153004f2014-09-29 22:08:00 +00001563 llvm::Type *resultType = nullptr;
John McCall85dd2c52011-05-15 02:19:42 +00001564 const ABIArgInfo &retAI = FI.getReturnInfo();
1565 switch (retAI.getKind()) {
Daniel Dunbard3674e62008-09-11 01:48:57 +00001566 case ABIArgInfo::Expand:
John McCall85dd2c52011-05-15 02:19:42 +00001567 llvm_unreachable("Invalid ABI kind for return argument");
Daniel Dunbard3674e62008-09-11 01:48:57 +00001568
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00001569 case ABIArgInfo::Extend:
Daniel Dunbar67dace892009-02-03 06:17:37 +00001570 case ABIArgInfo::Direct:
John McCall85dd2c52011-05-15 02:19:42 +00001571 resultType = retAI.getCoerceToType();
Daniel Dunbar67dace892009-02-03 06:17:37 +00001572 break;
1573
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001574 case ABIArgInfo::InAlloca:
Reid Klecknerfab1e892014-02-25 00:59:14 +00001575 if (retAI.getInAllocaSRet()) {
1576 // sret things on win32 aren't void, they return the sret pointer.
1577 QualType ret = FI.getReturnType();
1578 llvm::Type *ty = ConvertType(ret);
1579 unsigned addressSpace = Context.getTargetAddressSpace(ret);
1580 resultType = llvm::PointerType::get(ty, addressSpace);
1581 } else {
1582 resultType = llvm::Type::getVoidTy(getLLVMContext());
1583 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001584 break;
1585
John McCall7f416cc2015-09-08 08:05:57 +00001586 case ABIArgInfo::Indirect:
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001587 case ABIArgInfo::Ignore:
John McCall85dd2c52011-05-15 02:19:42 +00001588 resultType = llvm::Type::getVoidTy(getLLVMContext());
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001589 break;
John McCallf26e73d2016-03-11 04:30:43 +00001590
1591 case ABIArgInfo::CoerceAndExpand:
1592 resultType = retAI.getUnpaddedCoerceAndExpandType();
1593 break;
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001594 }
Mike Stump11289f42009-09-09 15:08:12 +00001595
Alexey Samsonov153004f2014-09-29 22:08:00 +00001596 ClangToLLVMArgMapping IRFunctionArgs(getContext(), FI, true);
1597 SmallVector<llvm::Type*, 8> ArgTypes(IRFunctionArgs.totalIRArgs());
1598
1599 // Add type for sret argument.
1600 if (IRFunctionArgs.hasSRetArg()) {
1601 QualType Ret = FI.getReturnType();
1602 llvm::Type *Ty = ConvertType(Ret);
1603 unsigned AddressSpace = Context.getTargetAddressSpace(Ret);
1604 ArgTypes[IRFunctionArgs.getSRetArgNo()] =
1605 llvm::PointerType::get(Ty, AddressSpace);
1606 }
1607
1608 // Add type for inalloca argument.
1609 if (IRFunctionArgs.hasInallocaArg()) {
1610 auto ArgStruct = FI.getArgStruct();
1611 assert(ArgStruct);
1612 ArgTypes[IRFunctionArgs.getInallocaArgNo()] = ArgStruct->getPointerTo();
1613 }
1614
John McCallc818bbb2012-12-07 07:03:17 +00001615 // Add in all of the required arguments.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001616 unsigned ArgNo = 0;
Alexey Samsonov34625dd2014-09-29 21:21:48 +00001617 CGFunctionInfo::const_arg_iterator it = FI.arg_begin(),
1618 ie = it + FI.getNumRequiredArgs();
Alexey Samsonov153004f2014-09-29 22:08:00 +00001619 for (; it != ie; ++it, ++ArgNo) {
1620 const ABIArgInfo &ArgInfo = it->info;
Mike Stump11289f42009-09-09 15:08:12 +00001621
Rafael Espindolafad28de2012-10-24 01:59:00 +00001622 // Insert a padding type to ensure proper alignment.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001623 if (IRFunctionArgs.hasPaddingArg(ArgNo))
1624 ArgTypes[IRFunctionArgs.getPaddingArgNo(ArgNo)] =
1625 ArgInfo.getPaddingType();
Rafael Espindolafad28de2012-10-24 01:59:00 +00001626
Alexey Samsonov153004f2014-09-29 22:08:00 +00001627 unsigned FirstIRArg, NumIRArgs;
1628 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
1629
1630 switch (ArgInfo.getKind()) {
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001631 case ABIArgInfo::Ignore:
Reid Kleckner314ef7b2014-02-01 00:04:45 +00001632 case ABIArgInfo::InAlloca:
Alexey Samsonov153004f2014-09-29 22:08:00 +00001633 assert(NumIRArgs == 0);
Daniel Dunbar94a6f252009-01-26 21:26:08 +00001634 break;
1635
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001636 case ABIArgInfo::Indirect: {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001637 assert(NumIRArgs == 1);
Yaxun Liud7523282017-04-17 20:10:44 +00001638 // indirect arguments are always on the stack, which is alloca addr space.
Chris Lattner2192fe52011-07-18 04:24:23 +00001639 llvm::Type *LTy = ConvertTypeForMem(it->type);
Yaxun Liud7523282017-04-17 20:10:44 +00001640 ArgTypes[FirstIRArg] = LTy->getPointerTo(
1641 CGM.getDataLayout().getAllocaAddrSpace());
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00001642 break;
1643 }
1644
1645 case ABIArgInfo::Extend:
Chris Lattner2cdfda42010-07-29 06:44:09 +00001646 case ABIArgInfo::Direct: {
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00001647 // Fast-isel and the optimizer generally like scalar values better than
1648 // FCAs, so we flatten them if this is safe to do for this argument.
Alexey Samsonov153004f2014-09-29 22:08:00 +00001649 llvm::Type *argType = ArgInfo.getCoerceToType();
James Molloy6f244b62014-05-09 16:21:39 +00001650 llvm::StructType *st = dyn_cast<llvm::StructType>(argType);
Alexey Samsonov153004f2014-09-29 22:08:00 +00001651 if (st && ArgInfo.isDirect() && ArgInfo.getCanBeFlattened()) {
1652 assert(NumIRArgs == st->getNumElements());
John McCall85dd2c52011-05-15 02:19:42 +00001653 for (unsigned i = 0, e = st->getNumElements(); i != e; ++i)
Alexey Samsonov153004f2014-09-29 22:08:00 +00001654 ArgTypes[FirstIRArg + i] = st->getElementType(i);
Chris Lattner3dd716c2010-06-28 23:44:11 +00001655 } else {
Alexey Samsonov153004f2014-09-29 22:08:00 +00001656 assert(NumIRArgs == 1);
1657 ArgTypes[FirstIRArg] = argType;
Chris Lattner3dd716c2010-06-28 23:44:11 +00001658 }
Daniel Dunbar2f219b02009-02-03 19:12:28 +00001659 break;
Chris Lattner2cdfda42010-07-29 06:44:09 +00001660 }
Mike Stump11289f42009-09-09 15:08:12 +00001661
John McCallf26e73d2016-03-11 04:30:43 +00001662 case ABIArgInfo::CoerceAndExpand: {
1663 auto ArgTypesIter = ArgTypes.begin() + FirstIRArg;
1664 for (auto EltTy : ArgInfo.getCoerceAndExpandTypeSequence()) {
1665 *ArgTypesIter++ = EltTy;
1666 }
1667 assert(ArgTypesIter == ArgTypes.begin() + FirstIRArg + NumIRArgs);
1668 break;
1669 }
1670
Daniel Dunbard3674e62008-09-11 01:48:57 +00001671 case ABIArgInfo::Expand:
Alexey Samsonov153004f2014-09-29 22:08:00 +00001672 auto ArgTypesIter = ArgTypes.begin() + FirstIRArg;
1673 getExpandedTypes(it->type, ArgTypesIter);
1674 assert(ArgTypesIter == ArgTypes.begin() + FirstIRArg + NumIRArgs);
Daniel Dunbard3674e62008-09-11 01:48:57 +00001675 break;
1676 }
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00001677 }
1678
Chris Lattner6fb0ccf2011-07-15 05:16:14 +00001679 bool Erased = FunctionsBeingProcessed.erase(&FI); (void)Erased;
1680 assert(Erased && "Not in set?");
Alexey Samsonov153004f2014-09-29 22:08:00 +00001681
1682 return llvm::FunctionType::get(resultType, ArgTypes, FI.isVariadic());
Daniel Dunbar81cf67f2008-09-09 23:48:28 +00001683}
1684
Chris Lattner2192fe52011-07-18 04:24:23 +00001685llvm::Type *CodeGenTypes::GetFunctionTypeForVTable(GlobalDecl GD) {
John McCall5d865c322010-08-31 07:33:07 +00001686 const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
Anders Carlsson64457732009-11-24 05:08:52 +00001687 const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00001688
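  // If the method's type cannot be converted to a complete IR function type
  // yet (for example, it still involves an incomplete type), fall back to an
  // empty struct type as a placeholder.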
Chris Lattner8806e322011-07-10 00:18:59 +00001689 if (!isFuncTypeConvertible(FPT))
1690 return llvm::StructType::get(getLLVMContext());
Fangrui Song6907ce22018-07-30 19:24:48 +00001691
Peter Collingbourned1c5b282019-03-22 23:05:10 +00001692 return GetFunctionType(GD);
Anders Carlsson64457732009-11-24 05:08:52 +00001693}
1694
Samuel Antao798f11c2015-11-23 22:04:44 +00001695static void AddAttributesFromFunctionProtoType(ASTContext &Ctx,
1696 llvm::AttrBuilder &FuncAttrs,
1697 const FunctionProtoType *FPT) {
1698 if (!FPT)
1699 return;
1700
1701 if (!isUnresolvedExceptionSpec(FPT->getExceptionSpecType()) &&
Richard Smitheaf11ad2018-05-03 03:58:32 +00001702 FPT->isNothrow())
Samuel Antao798f11c2015-11-23 22:04:44 +00001703 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
1704}
1705
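// Illustrative outcome (the exact values depend on the codegen options in
// effect): this can attach string attributes such as "frame-pointer"="all",
// "no-trapping-math"="false", and "stack-protector-buffer-size"="8" alongside
// the enum attributes added below.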
Justin Lebarb080b632017-01-25 21:29:48 +00001706void CodeGenModule::ConstructDefaultFnAttrList(StringRef Name, bool HasOptnone,
1707 bool AttrOnCallSite,
1708 llvm::AttrBuilder &FuncAttrs) {
1709 // OptimizeNoneAttr takes precedence over -Os or -Oz. No warning needed.
1710 if (!HasOptnone) {
1711 if (CodeGenOpts.OptimizeSize)
1712 FuncAttrs.addAttribute(llvm::Attribute::OptimizeForSize);
1713 if (CodeGenOpts.OptimizeSize == 2)
1714 FuncAttrs.addAttribute(llvm::Attribute::MinSize);
1715 }
1716
1717 if (CodeGenOpts.DisableRedZone)
1718 FuncAttrs.addAttribute(llvm::Attribute::NoRedZone);
Kristina Brooks7f569b72018-10-18 14:07:02 +00001719 if (CodeGenOpts.IndirectTlsSegRefs)
1720 FuncAttrs.addAttribute("indirect-tls-seg-refs");
Justin Lebarb080b632017-01-25 21:29:48 +00001721 if (CodeGenOpts.NoImplicitFloat)
1722 FuncAttrs.addAttribute(llvm::Attribute::NoImplicitFloat);
1723
1724 if (AttrOnCallSite) {
1725 // Attributes that should go on the call site only.
1726 if (!CodeGenOpts.SimplifyLibCalls ||
1727 CodeGenOpts.isNoBuiltinFunc(Name.data()))
1728 FuncAttrs.addAttribute(llvm::Attribute::NoBuiltin);
1729 if (!CodeGenOpts.TrapFuncName.empty())
1730 FuncAttrs.addAttribute("trap-func-name", CodeGenOpts.TrapFuncName);
1731 } else {
Yuanfang Chenff22ec32019-07-20 22:50:50 +00001732 StringRef FpKind;
1733 switch (CodeGenOpts.getFramePointer()) {
1734 case CodeGenOptions::FramePointerKind::None:
1735 FpKind = "none";
1736 break;
1737 case CodeGenOptions::FramePointerKind::NonLeaf:
1738 FpKind = "non-leaf";
1739 break;
1740 case CodeGenOptions::FramePointerKind::All:
1741 FpKind = "all";
1742 break;
Justin Lebarb080b632017-01-25 21:29:48 +00001743 }
Yuanfang Chenff22ec32019-07-20 22:50:50 +00001744 FuncAttrs.addAttribute("frame-pointer", FpKind);
Justin Lebarb080b632017-01-25 21:29:48 +00001745
1746 FuncAttrs.addAttribute("less-precise-fpmad",
1747 llvm::toStringRef(CodeGenOpts.LessPreciseFPMAD));
1748
Manoj Guptada08f6a2018-07-19 00:44:52 +00001749 if (CodeGenOpts.NullPointerIsValid)
1750 FuncAttrs.addAttribute("null-pointer-is-valid", "true");
Matt Arsenaulta4451d82019-11-01 17:57:29 -07001751
Matt Arsenaulta4e71f02019-11-06 10:26:43 -08001752 if (CodeGenOpts.FPDenormalMode != llvm::DenormalMode::getIEEE())
Matt Arsenault7fe94352019-10-29 16:16:05 -07001753 FuncAttrs.addAttribute("denormal-fp-math",
Matt Arsenaulta3c814d2019-11-06 17:10:52 -08001754 CodeGenOpts.FPDenormalMode.str());
Matt Arsenaulta4e71f02019-11-06 10:26:43 -08001755 if (CodeGenOpts.FP32DenormalMode != CodeGenOpts.FPDenormalMode) {
Matt Arsenaulta4451d82019-11-01 17:57:29 -07001756 FuncAttrs.addAttribute(
1757 "denormal-fp-math-f32",
Matt Arsenaulta3c814d2019-11-06 17:10:52 -08001758 CodeGenOpts.FP32DenormalMode.str());
1759 }
Matt Arsenaulta4451d82019-11-01 17:57:29 -07001760
Justin Lebarb080b632017-01-25 21:29:48 +00001761 FuncAttrs.addAttribute("no-trapping-math",
1762 llvm::toStringRef(CodeGenOpts.NoTrappingMath));
1763
Sanjay Patelc81450e2018-04-30 18:19:03 +00001764 // Strict (compliant) code is the default, so only add this attribute to
1765 // indicate that we are trying to work around a problem case.
1766 if (!CodeGenOpts.StrictFloatCastOverflow)
1767 FuncAttrs.addAttribute("strict-float-cast-overflow", "false");
Sanjay Pateld1754762018-04-27 14:22:48 +00001768
Justin Lebarb080b632017-01-25 21:29:48 +00001769 // TODO: Are these all needed?
1770 // unsafe/inf/nan/nsz are handled by instruction-level FastMathFlags.
1771 FuncAttrs.addAttribute("no-infs-fp-math",
1772 llvm::toStringRef(CodeGenOpts.NoInfsFPMath));
1773 FuncAttrs.addAttribute("no-nans-fp-math",
1774 llvm::toStringRef(CodeGenOpts.NoNaNsFPMath));
1775 FuncAttrs.addAttribute("unsafe-fp-math",
1776 llvm::toStringRef(CodeGenOpts.UnsafeFPMath));
1777 FuncAttrs.addAttribute("use-soft-float",
1778 llvm::toStringRef(CodeGenOpts.SoftFloat));
1779 FuncAttrs.addAttribute("stack-protector-buffer-size",
1780 llvm::utostr(CodeGenOpts.SSPBufferSize));
1781 FuncAttrs.addAttribute("no-signed-zeros-fp-math",
1782 llvm::toStringRef(CodeGenOpts.NoSignedZeros));
1783 FuncAttrs.addAttribute(
1784 "correctly-rounded-divide-sqrt-fp-math",
1785 llvm::toStringRef(CodeGenOpts.CorrectlyRoundedDivSqrt));
1786
1787 // TODO: Reciprocal estimate codegen options should apply to instructions?
Craig Topper402b4312017-11-20 17:09:22 +00001788 const std::vector<std::string> &Recips = CodeGenOpts.Reciprocals;
Justin Lebarb080b632017-01-25 21:29:48 +00001789 if (!Recips.empty())
1790 FuncAttrs.addAttribute("reciprocal-estimates",
Erich Keane857ac592017-10-27 18:45:06 +00001791 llvm::join(Recips, ","));
Justin Lebarb080b632017-01-25 21:29:48 +00001792
Craig Topper9a724aa2017-12-11 21:09:19 +00001793 if (!CodeGenOpts.PreferVectorWidth.empty() &&
1794 CodeGenOpts.PreferVectorWidth != "none")
1795 FuncAttrs.addAttribute("prefer-vector-width",
1796 CodeGenOpts.PreferVectorWidth);
1797
Justin Lebarb080b632017-01-25 21:29:48 +00001798 if (CodeGenOpts.StackRealignment)
1799 FuncAttrs.addAttribute("stackrealign");
1800 if (CodeGenOpts.Backchain)
1801 FuncAttrs.addAttribute("backchain");
Chandler Carruth664aa862018-09-04 12:38:00 +00001802
1803 if (CodeGenOpts.SpeculativeLoadHardening)
1804 FuncAttrs.addAttribute(llvm::Attribute::SpeculativeLoadHardening);
Justin Lebarb080b632017-01-25 21:29:48 +00001805 }
1806
Matt Arsenaulta1cf61b2017-10-06 19:34:40 +00001807 if (getLangOpts().assumeFunctionsAreConvergent()) {
1808 // Conservatively, mark all functions and calls in CUDA and OpenCL as
1809 // convergent (meaning, they may call an intrinsically convergent op, such
1810 // as __syncthreads() / barrier(), and so can't have certain optimizations
1811 // applied around them). LLVM will remove this attribute where it safely
1812 // can.
Justin Lebarb080b632017-01-25 21:29:48 +00001813 FuncAttrs.addAttribute(llvm::Attribute::Convergent);
Matt Arsenaulta1cf61b2017-10-06 19:34:40 +00001814 }
Justin Lebarb080b632017-01-25 21:29:48 +00001815
Matt Arsenaulta1cf61b2017-10-06 19:34:40 +00001816 if (getLangOpts().CUDA && getLangOpts().CUDAIsDevice) {
Justin Lebarb080b632017-01-25 21:29:48 +00001817 // Exceptions aren't supported in CUDA device code.
1818 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Justin Lebarb080b632017-01-25 21:29:48 +00001819 }
Peter Collingbourne87f477b2019-01-04 19:27:04 +00001820
1821 for (StringRef Attr : CodeGenOpts.DefaultFunctionAttrs) {
1822 StringRef Var, Value;
1823 std::tie(Var, Value) = Attr.split('=');
1824 FuncAttrs.addAttribute(Var, Value);
1825 }
Justin Lebarb080b632017-01-25 21:29:48 +00001826}
1827
1828void CodeGenModule::AddDefaultFnAttrs(llvm::Function &F) {
1829 llvm::AttrBuilder FuncAttrs;
Evandro Menezes85bd3972019-04-04 22:40:06 +00001830 ConstructDefaultFnAttrList(F.getName(), F.hasOptNone(),
Rui Ueyama49a3ad22019-07-16 04:46:31 +00001831 /* AttrOnCallSite = */ false, FuncAttrs);
Reid Kleckneree4930b2017-05-02 22:07:37 +00001832 F.addAttributes(llvm::AttributeList::FunctionIndex, FuncAttrs);
Justin Lebarb080b632017-01-25 21:29:48 +00001833}
1834
Francis Visoiu Mistrih4e799ad2020-01-27 10:40:14 -08001835static void addNoBuiltinAttributes(llvm::AttrBuilder &FuncAttrs,
1836 const LangOptions &LangOpts,
1837 const NoBuiltinAttr *NBA = nullptr) {
1838 auto AddNoBuiltinAttr = [&FuncAttrs](StringRef BuiltinName) {
1839 SmallString<32> AttributeName;
1840 AttributeName += "no-builtin-";
1841 AttributeName += BuiltinName;
1842 FuncAttrs.addAttribute(AttributeName);
1843 };
1844
Francis Visoiu Mistrihb1a81892020-01-28 15:23:28 -08001845 // First, handle the language options passed through -fno-builtin.
Francis Visoiu Mistrih4e799ad2020-01-27 10:40:14 -08001846 if (LangOpts.NoBuiltin) {
1847 // -fno-builtin disables them all.
1848 FuncAttrs.addAttribute("no-builtins");
1849 return;
1850 }
1851
1852 // Then, add attributes for builtins specified through -fno-builtin-<name>.
1853 llvm::for_each(LangOpts.NoBuiltinFuncs, AddNoBuiltinAttr);
1854
1855 // Now, let's check the __attribute__((no_builtin("..."))) attribute added to
1856 // the source.
1857 if (!NBA)
1858 return;
1859
1860 // If there is a wildcard in the builtin names specified through the
1861 // attribute, disable them all.
1862 if (llvm::is_contained(NBA->builtinNames(), "*")) {
1863 FuncAttrs.addAttribute("no-builtins");
1864 return;
1865 }
1866
1867 // And last, add the rest of the builtin names.
1868 llvm::for_each(NBA->builtinNames(), AddNoBuiltinAttr);
1869}
1870
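// ConstructAttributeList merges attributes from the CGFunctionInfo, the
// callee declaration (when one is available through CalleeInfo), and the
// language/codegen options into AttrList, for either a function definition or
// a call site depending on AttrOnCallSite, and reports the effective calling
// convention through CallingConv.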
Chad Rosier7dbc9cf2016-01-06 14:35:46 +00001871void CodeGenModule::ConstructAttributeList(
1872 StringRef Name, const CGFunctionInfo &FI, CGCalleeInfo CalleeInfo,
Reid Klecknercdd26792017-04-18 23:50:03 +00001873 llvm::AttributeList &AttrList, unsigned &CallingConv, bool AttrOnCallSite) {
Bill Wendlinga514ebc2012-10-15 20:36:26 +00001874 llvm::AttrBuilder FuncAttrs;
1875 llvm::AttrBuilder RetAttrs;
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001876
Daniel Dunbar0ef34792009-09-12 00:59:20 +00001877 CallingConv = FI.getEffectiveCallingConvention();
John McCallab26cfa2010-02-05 21:31:56 +00001878 if (FI.isNoReturn())
Bill Wendling207f0532012-12-20 19:27:06 +00001879 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
John McCallab26cfa2010-02-05 21:31:56 +00001880
Momchil Velikov080d0462020-03-24 09:32:51 +00001881 if (FI.isCmseNSCall())
1882 FuncAttrs.addAttribute("cmse_nonsecure_call");
1883
Samuel Antao798f11c2015-11-23 22:04:44 +00001884 // If we have information about the function prototype, we can learn
Craig Topper13d759f2018-04-30 22:02:48 +00001885 // attributes from there.
Samuel Antao798f11c2015-11-23 22:04:44 +00001886 AddAttributesFromFunctionProtoType(getContext(), FuncAttrs,
1887 CalleeInfo.getCalleeFunctionProtoType());
1888
Erich Keanede6480a32018-11-13 15:48:08 +00001889 const Decl *TargetDecl = CalleeInfo.getCalleeDecl().getDecl();
Samuel Antao798f11c2015-11-23 22:04:44 +00001890
Justin Lebarb080b632017-01-25 21:29:48 +00001891 bool HasOptnone = false;
Francis Visoiu Mistrih4e799ad2020-01-27 10:40:14 -08001892 // The NoBuiltinAttr attached to a TargetDecl (only allowed on FunctionDecls).
1893 const NoBuiltinAttr *NBA = nullptr;
Anton Korobeynikovc8478242009-04-04 00:49:24 +00001894 // FIXME: handle sseregparm someday...
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001895 if (TargetDecl) {
Rafael Espindola2d21ab02011-10-12 19:51:18 +00001896 if (TargetDecl->hasAttr<ReturnsTwiceAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001897 FuncAttrs.addAttribute(llvm::Attribute::ReturnsTwice);
Argyrios Kyrtzidisb4b64ca2009-06-30 02:34:44 +00001898 if (TargetDecl->hasAttr<NoThrowAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001899 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Richard Smithdebc59d2013-01-30 05:45:05 +00001900 if (TargetDecl->hasAttr<NoReturnAttr>())
1901 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
Xinliang David Li4ec36062017-06-13 21:14:07 +00001902 if (TargetDecl->hasAttr<ColdAttr>())
1903 FuncAttrs.addAttribute(llvm::Attribute::Cold);
Aaron Ballman7c19ab12014-02-22 16:59:24 +00001904 if (TargetDecl->hasAttr<NoDuplicateAttr>())
1905 FuncAttrs.addAttribute(llvm::Attribute::NoDuplicate);
Yaxun Liu7d07ae72016-11-01 18:45:32 +00001906 if (TargetDecl->hasAttr<ConvergentAttr>())
1907 FuncAttrs.addAttribute(llvm::Attribute::Convergent);
Richard Smithdebc59d2013-01-30 05:45:05 +00001908
Chandler Carruth45bbe012017-03-24 09:11:57 +00001909 if (const FunctionDecl *Fn = dyn_cast<FunctionDecl>(TargetDecl)) {
Samuel Antao798f11c2015-11-23 22:04:44 +00001910 AddAttributesFromFunctionProtoType(
Chandler Carruth45bbe012017-03-24 09:11:57 +00001911 getContext(), FuncAttrs, Fn->getType()->getAs<FunctionProtoType>());
Roman Lebedev3dd5a292020-02-26 01:37:17 +03001912 if (AttrOnCallSite && Fn->isReplaceableGlobalAllocationFunction()) {
1913 // A sane operator new returns a non-aliasing pointer.
1914 auto Kind = Fn->getDeclName().getCXXOverloadedOperator();
1915 if (getCodeGenOpts().AssumeSaneOperatorNew &&
1916 (Kind == OO_New || Kind == OO_Array_New))
1917 RetAttrs.addAttribute(llvm::Attribute::NoAlias);
1918 }
Chandler Carruth45bbe012017-03-24 09:11:57 +00001919 const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Fn);
Guillaume Chatelet98f31512019-09-25 11:31:28 +02001920 const bool IsVirtualCall = MD && MD->isVirtual();
1921 // Don't use [[noreturn]], _Noreturn or [[no_builtin]] for a call to a
1922 // virtual function. These attributes are not inherited by overriders.
1923 if (!(AttrOnCallSite && IsVirtualCall)) {
1924 if (Fn->isNoReturn())
1925 FuncAttrs.addAttribute(llvm::Attribute::NoReturn);
Francis Visoiu Mistrih4e799ad2020-01-27 10:40:14 -08001926 NBA = Fn->getAttr<NoBuiltinAttr>();
Guillaume Chatelet98f31512019-09-25 11:31:28 +02001927 }
John McCallbe349de2010-07-08 06:48:12 +00001928 }
1929
David Majnemer1bf0f8e2015-07-20 22:51:52 +00001930 // 'const', 'pure' and 'noalias' attributed functions are also nounwind.
Eric Christopherbf005ec2011-08-15 22:38:22 +00001931 if (TargetDecl->hasAttr<ConstAttr>()) {
Bill Wendling207f0532012-12-20 19:27:06 +00001932 FuncAttrs.addAttribute(llvm::Attribute::ReadNone);
1933 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Eric Christopherbf005ec2011-08-15 22:38:22 +00001934 } else if (TargetDecl->hasAttr<PureAttr>()) {
Bill Wendling207f0532012-12-20 19:27:06 +00001935 FuncAttrs.addAttribute(llvm::Attribute::ReadOnly);
1936 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
David Majnemer1bf0f8e2015-07-20 22:51:52 +00001937 } else if (TargetDecl->hasAttr<NoAliasAttr>()) {
1938 FuncAttrs.addAttribute(llvm::Attribute::ArgMemOnly);
1939 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
Eric Christopherbf005ec2011-08-15 22:38:22 +00001940 }
David Majnemer631a90b2015-02-04 07:23:21 +00001941 if (TargetDecl->hasAttr<RestrictAttr>())
Bill Wendling207f0532012-12-20 19:27:06 +00001942 RetAttrs.addAttribute(llvm::Attribute::NoAlias);
Manoj Guptada08f6a2018-07-19 00:44:52 +00001943 if (TargetDecl->hasAttr<ReturnsNonNullAttr>() &&
1944 !CodeGenOpts.NullPointerIsValid)
Hal Finkeld8442b12014-07-12 04:51:04 +00001945 RetAttrs.addAttribute(llvm::Attribute::NonNull);
Oren Ben Simhon318a6ea2017-04-27 12:01:00 +00001946 if (TargetDecl->hasAttr<AnyX86NoCallerSavedRegistersAttr>())
1947 FuncAttrs.addAttribute("no_caller_saved_registers");
Oren Ben Simhon220671a2018-03-17 13:31:35 +00001948 if (TargetDecl->hasAttr<AnyX86NoCfCheckAttr>())
1949 FuncAttrs.addAttribute(llvm::Attribute::NoCfCheck);
Paul Robinson08556952014-12-11 20:14:04 +00001950
1951 HasOptnone = TargetDecl->hasAttr<OptimizeNoneAttr>();
George Burgess IVe3763372016-12-22 02:50:20 +00001952 if (auto *AllocSize = TargetDecl->getAttr<AllocSizeAttr>()) {
1953 Optional<unsigned> NumElemsParam;
Joel E. Denny81508102018-03-13 14:51:22 +00001954 if (AllocSize->getNumElemsParam().isValid())
1955 NumElemsParam = AllocSize->getNumElemsParam().getLLVMIndex();
1956 FuncAttrs.addAllocSizeAttr(AllocSize->getElemSizeParam().getLLVMIndex(),
George Burgess IVe3763372016-12-22 02:50:20 +00001957 NumElemsParam);
1958 }
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00001959 }
1960
Francis Visoiu Mistrih4e799ad2020-01-27 10:40:14 -08001961 // Attach "no-builtins" attributes to:
1962 // * call sites: both `nobuiltin` and "no-builtins" or "no-builtin-<name>".
1963 // * definitions: "no-builtins" or "no-builtin-<name>" only.
1964 // The attributes can come from:
1965 // * LangOpts: -ffreestanding, -fno-builtin, -fno-builtin-<name>
1966 // * FunctionDecl attributes: __attribute__((no_builtin(...)))
1967 addNoBuiltinAttributes(FuncAttrs, getLangOpts(), NBA);
1968
Justin Lebarb080b632017-01-25 21:29:48 +00001969 ConstructDefaultFnAttrList(Name, HasOptnone, AttrOnCallSite, FuncAttrs);
Paul Robinson08556952014-12-11 20:14:04 +00001970
Zola Bridges826ef592019-01-18 17:20:46 +00001971 // This must run after constructing the default function attribute list
1972 // to ensure that the speculative load hardening attribute is removed
1973 // in the case where the -mspeculative-load-hardening flag was passed.
1974 if (TargetDecl) {
1975 if (TargetDecl->hasAttr<NoSpeculativeLoadHardeningAttr>())
1976 FuncAttrs.removeAttribute(llvm::Attribute::SpeculativeLoadHardening);
1977 if (TargetDecl->hasAttr<SpeculativeLoadHardeningAttr>())
1978 FuncAttrs.addAttribute(llvm::Attribute::SpeculativeLoadHardening);
1979 }
1980
Peter Collingbourneb4728c12014-05-19 22:14:34 +00001981 if (CodeGenOpts.EnableSegmentedStacks &&
1982 !(TargetDecl && TargetDecl->hasAttr<NoSplitStackAttr>()))
Reid Klecknerfb873af2014-04-10 22:59:13 +00001983 FuncAttrs.addAttribute("split-stack");
Devang Patel6e467b12009-06-04 23:32:02 +00001984
Sriraman Tallam5c651482017-11-07 19:37:51 +00001985 // Add NonLazyBind attribute to function declarations when -fno-plt
1986 // is used.
1987 if (TargetDecl && CodeGenOpts.NoPLT) {
1988 if (auto *Fn = dyn_cast<FunctionDecl>(TargetDecl)) {
1989 if (!Fn->isDefined() && !AttrOnCallSite) {
1990 FuncAttrs.addAttribute(llvm::Attribute::NonLazyBind);
1991 }
1992 }
1993 }
1994
Alexey Sotkin20f65922018-02-22 11:54:14 +00001995 if (TargetDecl && TargetDecl->hasAttr<OpenCLKernelAttr>()) {
1996 if (getLangOpts().OpenCLVersion <= 120) {
1997      // OpenCL v1.2: work groups are always uniform.
1998 FuncAttrs.addAttribute("uniform-work-group-size", "true");
1999 } else {
2000      // OpenCL v2.0: work groups may or may not be uniform.
2001      // The '-cl-uniform-work-group-size' compile option gives the
2002      // compiler a hint that the global work-size is a multiple of the
2003      // work-group size specified to clEnqueueNDRangeKernel
2004      // (i.e. work groups are uniform).
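      // For example (illustrative): compiling a CL2.0 kernel with
      //   clang -cl-std=CL2.0 -cl-uniform-work-group-size ...
      // is expected to set CodeGenOpts.UniformWGSize, so the attribute below
      // becomes "uniform-work-group-size"="true".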
2005 FuncAttrs.addAttribute("uniform-work-group-size",
2006 llvm::toStringRef(CodeGenOpts.UniformWGSize));
2007 }
2008 }
2009
Justin Lebarb080b632017-01-25 21:29:48 +00002010 if (!AttrOnCallSite) {
Momchil Velikov080d0462020-03-24 09:32:51 +00002011 if (TargetDecl && TargetDecl->hasAttr<CmseNSEntryAttr>())
2012 FuncAttrs.addAttribute("cmse_nonsecure_entry");
2013
Akira Hatanaka627586b2018-03-02 01:53:15 +00002014 bool DisableTailCalls = false;
2015
2016 if (CodeGenOpts.DisableTailCalls)
2017 DisableTailCalls = true;
2018 else if (TargetDecl) {
2019 if (TargetDecl->hasAttr<DisableTailCallsAttr>() ||
2020 TargetDecl->hasAttr<AnyX86InterruptAttr>())
2021 DisableTailCalls = true;
2022 else if (CodeGenOpts.NoEscapingBlockTailCalls) {
2023 if (const auto *BD = dyn_cast<BlockDecl>(TargetDecl))
2024 if (!BD->doesNotEscape())
2025 DisableTailCalls = true;
2026 }
2027 }
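    // A sketch of the outcome (spellings illustrative): -mdisable-tail-calls,
    // __attribute__((disable_tail_calls)), and x86 interrupt handlers are all
    // expected to land below as "disable-tail-calls"="true"; otherwise the
    // attribute is emitted with the value "false".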
2028
Justin Lebarb080b632017-01-25 21:29:48 +00002029 FuncAttrs.addAttribute("disable-tail-calls",
2030 llvm::toStringRef(DisableTailCalls));
Erich Keanede6480a32018-11-13 15:48:08 +00002031 GetCPUAndFeaturesAttributes(CalleeInfo.getCalleeDecl(), FuncAttrs);
Bill Wendling985d1c52013-02-15 21:30:01 +00002032 }
2033
Alexey Samsonov153004f2014-09-29 22:08:00 +00002034 ClangToLLVMArgMapping IRFunctionArgs(getContext(), FI);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002035
Daniel Dunbar3668cb22009-02-02 23:43:58 +00002036 QualType RetTy = FI.getReturnType();
Daniel Dunbarb52d0772009-02-03 05:59:18 +00002037 const ABIArgInfo &RetAI = FI.getReturnInfo();
Daniel Dunbar7a95ca32008-09-10 04:01:49 +00002038 switch (RetAI.getKind()) {
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00002039 case ABIArgInfo::Extend:
Alex Bradburye41a5e22018-01-12 20:08:16 +00002040 if (RetAI.isSignExt())
Jakob Stoklund Olesend7bf2932013-05-29 03:57:23 +00002041 RetAttrs.addAttribute(llvm::Attribute::SExt);
Alex Bradburye41a5e22018-01-12 20:08:16 +00002042 else
Jakob Stoklund Olesend7bf2932013-05-29 03:57:23 +00002043 RetAttrs.addAttribute(llvm::Attribute::ZExt);
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00002044 LLVM_FALLTHROUGH;
Daniel Dunbar67dace892009-02-03 06:17:37 +00002045 case ABIArgInfo::Direct:
Jakob Stoklund Olesena3661142013-06-05 03:00:09 +00002046 if (RetAI.getInReg())
2047 RetAttrs.addAttribute(llvm::Attribute::InReg);
2048 break;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002049 case ABIArgInfo::Ignore:
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002050 break;
2051
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002052 case ABIArgInfo::InAlloca:
Rafael Espindola06b2b4a2012-07-31 02:44:24 +00002053 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002054 // inalloca and sret disable readnone and readonly
Bill Wendling207f0532012-12-20 19:27:06 +00002055 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
2056 .removeAttribute(llvm::Attribute::ReadNone);
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002057 break;
Rafael Espindola06b2b4a2012-07-31 02:44:24 +00002058 }
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002059
John McCallf26e73d2016-03-11 04:30:43 +00002060 case ABIArgInfo::CoerceAndExpand:
2061 break;
2062
Daniel Dunbard3674e62008-09-11 01:48:57 +00002063 case ABIArgInfo::Expand:
David Blaikie83d382b2011-09-23 05:06:16 +00002064 llvm_unreachable("Invalid ABI kind for return argument");
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00002065 }
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002066
Hal Finkela2347ba2014-07-18 15:52:10 +00002067 if (const auto *RefTy = RetTy->getAs<ReferenceType>()) {
2068 QualType PTy = RefTy->getPointeeType();
David Majnemer9df56372015-09-10 21:52:00 +00002069 if (!PTy->isIncompleteType() && PTy->isConstantSizeType())
Richard Smith0130b6c2020-01-31 19:06:21 -08002070 RetAttrs.addDereferenceableAttr(
2071 getMinimumObjectSize(PTy).getQuantity());
Manoj Guptada08f6a2018-07-19 00:44:52 +00002072 else if (getContext().getTargetAddressSpace(PTy) == 0 &&
2073 !CodeGenOpts.NullPointerIsValid)
Hal Finkela2347ba2014-07-18 15:52:10 +00002074 RetAttrs.addAttribute(llvm::Attribute::NonNull);
2075 }
Nick Lewycky9b46eb82014-05-28 09:56:42 +00002076
John McCall12f23522016-04-04 18:33:08 +00002077 bool hasUsedSRet = false;
Reid Klecknercdd26792017-04-18 23:50:03 +00002078 SmallVector<llvm::AttributeSet, 4> ArgAttrs(IRFunctionArgs.totalIRArgs());
John McCall12f23522016-04-04 18:33:08 +00002079
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002080 // Attach attributes to sret.
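  // (Illustrative: a function returning a large aggregate indirectly gets a
  // hidden pointer argument carrying `sret`, possibly `inreg`, and an
  // alignment matching the indirect return slot.)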
2081 if (IRFunctionArgs.hasSRetArg()) {
2082 llvm::AttrBuilder SRETAttrs;
Mandeep Singh Grang85a0f8f2019-05-03 21:12:24 +00002083 SRETAttrs.addAttribute(llvm::Attribute::StructRet);
John McCall12f23522016-04-04 18:33:08 +00002084 hasUsedSRet = true;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002085 if (RetAI.getInReg())
2086 SRETAttrs.addAttribute(llvm::Attribute::InReg);
Erik Pilkingtonde98cf92020-03-24 12:36:19 -04002087 SRETAttrs.addAlignmentAttr(RetAI.getIndirectAlign().getQuantity());
Reid Klecknercdd26792017-04-18 23:50:03 +00002088 ArgAttrs[IRFunctionArgs.getSRetArgNo()] =
2089 llvm::AttributeSet::get(getLLVMContext(), SRETAttrs);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002090 }
2091
2092 // Attach attributes to inalloca argument.
2093 if (IRFunctionArgs.hasInallocaArg()) {
2094 llvm::AttrBuilder Attrs;
2095 Attrs.addAttribute(llvm::Attribute::InAlloca);
Reid Klecknercdd26792017-04-18 23:50:03 +00002096 ArgAttrs[IRFunctionArgs.getInallocaArgNo()] =
2097 llvm::AttributeSet::get(getLLVMContext(), Attrs);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002098 }
2099
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002100 unsigned ArgNo = 0;
2101 for (CGFunctionInfo::const_arg_iterator I = FI.arg_begin(),
2102 E = FI.arg_end();
2103 I != E; ++I, ++ArgNo) {
2104 QualType ParamType = I->type;
2105 const ABIArgInfo &AI = I->info;
Bill Wendlinga514ebc2012-10-15 20:36:26 +00002106 llvm::AttrBuilder Attrs;
Anton Korobeynikovc8478242009-04-04 00:49:24 +00002107
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002108 // Add attribute for padding argument, if necessary.
2109 if (IRFunctionArgs.hasPaddingArg(ArgNo)) {
Reid Klecknercdd26792017-04-18 23:50:03 +00002110 if (AI.getPaddingInReg()) {
2111 ArgAttrs[IRFunctionArgs.getPaddingArgNo(ArgNo)] =
2112 llvm::AttributeSet::get(
2113 getLLVMContext(),
2114 llvm::AttrBuilder().addAttribute(llvm::Attribute::InReg));
2115 }
Rafael Espindolafad28de2012-10-24 01:59:00 +00002116 }
2117
John McCall39ec71f2010-03-27 00:47:27 +00002118 // 'restrict' -> 'noalias' is done in EmitFunctionProlog when we
2119 // have the corresponding parameter variable. It doesn't make
Daniel Dunbarcb2b3d02011-02-10 18:10:07 +00002120 // sense to do it here because parameters are so messed up.
Daniel Dunbard3674e62008-09-11 01:48:57 +00002121 switch (AI.getKind()) {
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002122 case ABIArgInfo::Extend:
Alex Bradburye41a5e22018-01-12 20:08:16 +00002123 if (AI.isSignExt())
Bill Wendling207f0532012-12-20 19:27:06 +00002124 Attrs.addAttribute(llvm::Attribute::SExt);
Alex Bradburye41a5e22018-01-12 20:08:16 +00002125 else
2126 Attrs.addAttribute(llvm::Attribute::ZExt);
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00002127 LLVM_FALLTHROUGH;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002128 case ABIArgInfo::Direct:
Peter Collingbournef7706832014-12-12 23:41:25 +00002129 if (ArgNo == 0 && FI.isChainCall())
2130 Attrs.addAttribute(llvm::Attribute::Nest);
2131 else if (AI.getInReg())
Bill Wendling207f0532012-12-20 19:27:06 +00002132 Attrs.addAttribute(llvm::Attribute::InReg);
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002133 break;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002134
James Y Knight71608572015-08-21 18:19:06 +00002135 case ABIArgInfo::Indirect: {
Rafael Espindola703c47f2012-10-19 05:04:37 +00002136 if (AI.getInReg())
Bill Wendling207f0532012-12-20 19:27:06 +00002137 Attrs.addAttribute(llvm::Attribute::InReg);
Rafael Espindola703c47f2012-10-19 05:04:37 +00002138
Anders Carlsson20759ad2009-09-16 15:53:40 +00002139 if (AI.getIndirectByVal())
Tim Northoverc46827c2019-06-05 21:12:14 +00002140 Attrs.addByValAttr(getTypes().ConvertTypeForMem(ParamType));
Anders Carlsson20759ad2009-09-16 15:53:40 +00002141
John McCall7f416cc2015-09-08 08:05:57 +00002142 CharUnits Align = AI.getIndirectAlign();
James Y Knight71608572015-08-21 18:19:06 +00002143
2144 // In a byval argument, it is important that the required
2145 // alignment of the type is honored, as LLVM might be creating a
2146 // *new* stack object, and needs to know what alignment to give
2147 // it. (Sometimes it can deduce a sensible alignment on its own,
2148 // but not if clang decides it must emit a packed struct, or the
2149 // user specifies increased alignment requirements.)
2150 //
2151 // This is different from indirect *not* byval, where the object
2152 // exists already, and the align attribute is purely
2153 // informative.
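      // For instance (illustrative): passing a `struct alignas(32) S` byval
      // is expected to produce `byval(%struct.S) align 32` on the IR
      // parameter, so the caller-side copy LLVM creates is placed at the
      // required alignment.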
John McCall7f416cc2015-09-08 08:05:57 +00002154 assert(!Align.isZero());
James Y Knight71608572015-08-21 18:19:06 +00002155
John McCall7f416cc2015-09-08 08:05:57 +00002156 // For now, only add this when we have a byval argument.
2157 // TODO: be less lazy about updating test cases.
2158 if (AI.getIndirectByVal())
2159 Attrs.addAlignmentAttr(Align.getQuantity());
Bill Wendlinga7912f82012-10-10 07:36:56 +00002160
Daniel Dunbarc2304432009-03-18 19:51:01 +00002161 // byval disables readnone and readonly.
Bill Wendling207f0532012-12-20 19:27:06 +00002162 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
2163 .removeAttribute(llvm::Attribute::ReadNone);
Daniel Dunbard3674e62008-09-11 01:48:57 +00002164 break;
James Y Knight71608572015-08-21 18:19:06 +00002165 }
Daniel Dunbar94a6f252009-01-26 21:26:08 +00002166 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002167 case ABIArgInfo::Expand:
John McCallf26e73d2016-03-11 04:30:43 +00002168 case ABIArgInfo::CoerceAndExpand:
2169 break;
Daniel Dunbar94a6f252009-01-26 21:26:08 +00002170
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002171 case ABIArgInfo::InAlloca:
2172 // inalloca disables readnone and readonly.
2173 FuncAttrs.removeAttribute(llvm::Attribute::ReadOnly)
2174 .removeAttribute(llvm::Attribute::ReadNone);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002175 continue;
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00002176 }
Mike Stump11289f42009-09-09 15:08:12 +00002177
Hal Finkela2347ba2014-07-18 15:52:10 +00002178 if (const auto *RefTy = ParamType->getAs<ReferenceType>()) {
2179 QualType PTy = RefTy->getPointeeType();
David Majnemer9df56372015-09-10 21:52:00 +00002180 if (!PTy->isIncompleteType() && PTy->isConstantSizeType())
Richard Smith0130b6c2020-01-31 19:06:21 -08002181 Attrs.addDereferenceableAttr(
2182 getMinimumObjectSize(PTy).getQuantity());
Manoj Guptada08f6a2018-07-19 00:44:52 +00002183 else if (getContext().getTargetAddressSpace(PTy) == 0 &&
2184 !CodeGenOpts.NullPointerIsValid)
Hal Finkela2347ba2014-07-18 15:52:10 +00002185 Attrs.addAttribute(llvm::Attribute::NonNull);
2186 }
Nick Lewycky9b46eb82014-05-28 09:56:42 +00002187
John McCall12f23522016-04-04 18:33:08 +00002188 switch (FI.getExtParameterInfo(ArgNo).getABI()) {
2189 case ParameterABI::Ordinary:
2190 break;
2191
2192 case ParameterABI::SwiftIndirectResult: {
2193 // Add 'sret' if we haven't already used it for something, but
2194 // only if the result is void.
2195 if (!hasUsedSRet && RetTy->isVoidType()) {
2196 Attrs.addAttribute(llvm::Attribute::StructRet);
2197 hasUsedSRet = true;
2198 }
2199
2200 // Add 'noalias' in either case.
2201 Attrs.addAttribute(llvm::Attribute::NoAlias);
2202
2203 // Add 'dereferenceable' and 'alignment'.
2204 auto PTy = ParamType->getPointeeType();
2205 if (!PTy->isIncompleteType() && PTy->isConstantSizeType()) {
2206 auto info = getContext().getTypeInfoInChars(PTy);
2207 Attrs.addDereferenceableAttr(info.first.getQuantity());
Guillaume Chateletb65fa482019-10-15 12:56:24 +00002208 Attrs.addAttribute(llvm::Attribute::getWithAlignment(
2209 getLLVMContext(), info.second.getAsAlign()));
John McCall12f23522016-04-04 18:33:08 +00002210 }
2211 break;
2212 }
2213
2214 case ParameterABI::SwiftErrorResult:
2215 Attrs.addAttribute(llvm::Attribute::SwiftError);
2216 break;
2217
2218 case ParameterABI::SwiftContext:
2219 Attrs.addAttribute(llvm::Attribute::SwiftSelf);
2220 break;
2221 }
2222
Akira Hatanaka98a49332017-09-22 00:41:05 +00002223 if (FI.getExtParameterInfo(ArgNo).isNoEscape())
2224 Attrs.addAttribute(llvm::Attribute::NoCapture);
2225
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002226 if (Attrs.hasAttributes()) {
2227 unsigned FirstIRArg, NumIRArgs;
2228 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
2229 for (unsigned i = 0; i < NumIRArgs; i++)
Reid Klecknercdd26792017-04-18 23:50:03 +00002230 ArgAttrs[FirstIRArg + i] =
2231 llvm::AttributeSet::get(getLLVMContext(), Attrs);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002232 }
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00002233 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002234 assert(ArgNo == FI.arg_size());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002235
Reid Klecknercdd26792017-04-18 23:50:03 +00002236 AttrList = llvm::AttributeList::get(
2237 getLLVMContext(), llvm::AttributeSet::get(getLLVMContext(), FuncAttrs),
2238 llvm::AttributeSet::get(getLLVMContext(), RetAttrs), ArgAttrs);
Daniel Dunbar76c8eb72008-09-10 00:32:18 +00002239}
2240
John McCalla738c252011-03-09 04:27:21 +00002241/// An argument came in as a promoted argument; demote it back to its
2242/// declared type.
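/// For example (a sketch): in a K&R-style definition `int f(x) float x; {...}`
/// the argument arrives promoted to double and is narrowed back to the
/// declared float type with an FP cast; integer promotions are undone with a
/// trunc.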
2243static llvm::Value *emitArgumentDemotion(CodeGenFunction &CGF,
2244 const VarDecl *var,
2245 llvm::Value *value) {
Chris Lattner2192fe52011-07-18 04:24:23 +00002246 llvm::Type *varType = CGF.ConvertType(var->getType());
John McCalla738c252011-03-09 04:27:21 +00002247
2248 // This can happen with promotions that actually don't change the
2249 // underlying type, like the enum promotions.
2250 if (value->getType() == varType) return value;
2251
2252 assert((varType->isIntegerTy() || varType->isFloatingPointTy())
2253 && "unexpected promotion type");
2254
2255 if (isa<llvm::IntegerType>(varType))
2256 return CGF.Builder.CreateTrunc(value, varType, "arg.unpromote");
2257
2258 return CGF.Builder.CreateFPCast(value, varType, "arg.unpromote");
2259}
2260
Chandler Carruth45bbe012017-03-24 09:11:57 +00002261/// Returns the attribute (either parameter attribute, or function
2262/// attribute), which declares argument ArgNo to be non-null.
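/// For example (illustrative): given
/// `void f(int *p, int *q) __attribute__((nonnull(1)))`, querying ArgNo 0 is
/// expected to return the function-level attribute, while ArgNo 1 yields
/// nullptr unless the parameter itself is annotated.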
2263static const NonNullAttr *getNonNullAttr(const Decl *FD, const ParmVarDecl *PVD,
2264 QualType ArgType, unsigned ArgNo) {
2265 // FIXME: __attribute__((nonnull)) can also be applied to:
2266 // - references to pointers, where the pointee is known to be
2267 // nonnull (apparently a Clang extension)
2268 // - transparent unions containing pointers
2269 // In the former case, LLVM IR cannot represent the constraint. In
2270 // the latter case, we have no guarantee that the transparent union
2271 // is in fact passed as a pointer.
2272 if (!ArgType->isAnyPointerType() && !ArgType->isBlockPointerType())
2273 return nullptr;
2274 // First, check attribute on parameter itself.
2275 if (PVD) {
2276 if (auto ParmNNAttr = PVD->getAttr<NonNullAttr>())
2277 return ParmNNAttr;
2278 }
2279 // Check function attributes.
2280 if (!FD)
2281 return nullptr;
2282 for (const auto *NNAttr : FD->specific_attrs<NonNullAttr>()) {
2283 if (NNAttr->isNonNull(ArgNo))
2284 return NNAttr;
2285 }
2286 return nullptr;
2287}
2288
John McCall12f23522016-04-04 18:33:08 +00002289namespace {
2290 struct CopyBackSwiftError final : EHScopeStack::Cleanup {
2291 Address Temp;
2292 Address Arg;
2293 CopyBackSwiftError(Address temp, Address arg) : Temp(temp), Arg(arg) {}
2294 void Emit(CodeGenFunction &CGF, Flags flags) override {
2295 llvm::Value *errorValue = CGF.Builder.CreateLoad(Temp);
2296 CGF.Builder.CreateStore(errorValue, Arg);
2297 }
2298 };
2299}
2300
Daniel Dunbard931a872009-02-02 22:03:45 +00002301void CodeGenFunction::EmitFunctionProlog(const CGFunctionInfo &FI,
2302 llvm::Function *Fn,
Daniel Dunbar613855c2008-09-09 23:27:19 +00002303 const FunctionArgList &Args) {
Hans Wennborgd71907d2014-09-04 22:16:33 +00002304 if (CurCodeDecl && CurCodeDecl->hasAttr<NakedAttr>())
2305 // Naked functions don't have prologues.
2306 return;
2307
John McCallcaa19452009-07-28 01:00:58 +00002308 // If this is an implicit-return-zero function, go ahead and
2309 // initialize the return value. TODO: it might be nice to have
2310 // a more general mechanism for this that didn't require synthesized
2311 // return statements.
John McCalldec348f72013-05-03 07:33:41 +00002312 if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(CurCodeDecl)) {
John McCallcaa19452009-07-28 01:00:58 +00002313 if (FD->hasImplicitReturnZero()) {
Alp Toker314cc812014-01-25 16:55:45 +00002314 QualType RetTy = FD->getReturnType().getUnqualifiedType();
Chris Lattner2192fe52011-07-18 04:24:23 +00002315 llvm::Type* LLVMTy = CGM.getTypes().ConvertType(RetTy);
Owen Anderson0b75f232009-07-31 20:28:54 +00002316 llvm::Constant* Zero = llvm::Constant::getNullValue(LLVMTy);
John McCallcaa19452009-07-28 01:00:58 +00002317 Builder.CreateStore(Zero, ReturnValue);
2318 }
2319 }
2320
Mike Stump18bb9282009-05-16 07:57:57 +00002321 // FIXME: We no longer need the types from FunctionArgList; lift up and
2322 // simplify.
Daniel Dunbar5a0acdc92009-02-03 06:02:10 +00002323
Alexey Samsonov153004f2014-09-29 22:08:00 +00002324 ClangToLLVMArgMapping IRFunctionArgs(CGM.getContext(), FI);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002325 // Flattened function arguments.
John McCall12f23522016-04-04 18:33:08 +00002326 SmallVector<llvm::Value *, 16> FnArgs;
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002327 FnArgs.reserve(IRFunctionArgs.totalIRArgs());
2328 for (auto &Arg : Fn->args()) {
2329 FnArgs.push_back(&Arg);
2330 }
2331 assert(FnArgs.size() == IRFunctionArgs.totalIRArgs());
Mike Stump11289f42009-09-09 15:08:12 +00002332
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002333 // If we're using inalloca, all the memory arguments are GEPs off of the last
2334 // parameter, which is a pointer to the complete memory area.
John McCall7f416cc2015-09-08 08:05:57 +00002335 Address ArgStruct = Address::invalid();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002336 if (IRFunctionArgs.hasInallocaArg()) {
John McCall7f416cc2015-09-08 08:05:57 +00002337 ArgStruct = Address(FnArgs[IRFunctionArgs.getInallocaArgNo()],
2338 FI.getArgStructAlignment());
2339
2340 assert(ArgStruct.getType() == FI.getArgStruct()->getPointerTo());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002341 }
2342
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002343 // Name the struct return parameter.
2344 if (IRFunctionArgs.hasSRetArg()) {
John McCall12f23522016-04-04 18:33:08 +00002345 auto AI = cast<llvm::Argument>(FnArgs[IRFunctionArgs.getSRetArgNo()]);
Daniel Dunbar613855c2008-09-09 23:27:19 +00002346 AI->setName("agg.result");
Reid Klecknercdd26792017-04-18 23:50:03 +00002347 AI->addAttr(llvm::Attribute::NoAlias);
Daniel Dunbar613855c2008-09-09 23:27:19 +00002348 }
Mike Stump11289f42009-09-09 15:08:12 +00002349
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002350 // Track if we received the parameter as a pointer (indirect, byval, or
2351  // inalloca). If we already have a pointer, EmitParmDecl doesn't need to copy it
2352 // into a local alloca for us.
John McCall7f416cc2015-09-08 08:05:57 +00002353 SmallVector<ParamValue, 16> ArgVals;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002354 ArgVals.reserve(Args.size());
2355
Reid Kleckner739756c2013-12-04 19:23:12 +00002356 // Create a pointer value for every parameter declaration. This usually
2357 // entails copying one or more LLVM IR arguments into an alloca. Don't push
2358 // any cleanups or do anything that might unwind. We do that separately, so
2359 // we can push the cleanups in the correct order for the ABI.
Daniel Dunbara45bdbb2009-02-04 21:17:21 +00002360 assert(FI.arg_size() == Args.size() &&
2361 "Mismatch between function signature & arguments.");
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002362 unsigned ArgNo = 0;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00002363 CGFunctionInfo::const_arg_iterator info_it = FI.arg_begin();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002364 for (FunctionArgList::const_iterator i = Args.begin(), e = Args.end();
Devang Patel68a15252011-03-03 20:13:15 +00002365 i != e; ++i, ++info_it, ++ArgNo) {
John McCalla738c252011-03-09 04:27:21 +00002366 const VarDecl *Arg = *i;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00002367 const ABIArgInfo &ArgI = info_it->info;
Daniel Dunbard3674e62008-09-11 01:48:57 +00002368
John McCalla738c252011-03-09 04:27:21 +00002369 bool isPromoted =
2370 isa<ParmVarDecl>(Arg) && cast<ParmVarDecl>(Arg)->isKNRPromoted();
Volodymyr Sapsai17ebdb22018-01-22 22:29:24 +00002371 // We are converting from ABIArgInfo type to VarDecl type directly, unless
2372 // the parameter is promoted. In this case we convert to
2373 // CGFunctionInfo::ArgInfo type with subsequent argument demotion.
2374 QualType Ty = isPromoted ? info_it->type : Arg->getType();
2375 assert(hasScalarEvaluationKind(Ty) ==
2376 hasScalarEvaluationKind(Arg->getType()));
John McCalla738c252011-03-09 04:27:21 +00002377
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002378 unsigned FirstIRArg, NumIRArgs;
2379 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
Rafael Espindolafad28de2012-10-24 01:59:00 +00002380
Daniel Dunbard3674e62008-09-11 01:48:57 +00002381 switch (ArgI.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002382 case ABIArgInfo::InAlloca: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002383 assert(NumIRArgs == 0);
John McCall7f416cc2015-09-08 08:05:57 +00002384 auto FieldIndex = ArgI.getInAllocaFieldIndex();
James Y Knight751fe282019-02-09 22:22:28 +00002385 Address V =
2386 Builder.CreateStructGEP(ArgStruct, FieldIndex, Arg->getName());
Reid Kleckner2c6a3892020-02-11 16:03:26 -08002387 if (ArgI.getInAllocaIndirect())
2388 V = Address(Builder.CreateLoad(V),
2389 getContext().getTypeAlignInChars(Ty));
John McCall7f416cc2015-09-08 08:05:57 +00002390 ArgVals.push_back(ParamValue::forIndirect(V));
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002391 break;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002392 }
2393
Daniel Dunbar747865a2009-02-05 09:16:39 +00002394 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002395 assert(NumIRArgs == 1);
John McCall7f416cc2015-09-08 08:05:57 +00002396 Address ParamAddr = Address(FnArgs[FirstIRArg], ArgI.getIndirectAlign());
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00002397
John McCall47fb9502013-03-07 21:37:08 +00002398 if (!hasScalarEvaluationKind(Ty)) {
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00002399 // Aggregates and complex variables are accessed by reference. All we
John McCall7f416cc2015-09-08 08:05:57 +00002400 // need to do is realign the value, if requested.
2401 Address V = ParamAddr;
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00002402 if (ArgI.getIndirectRealign()) {
John McCall7f416cc2015-09-08 08:05:57 +00002403 Address AlignedTemp = CreateMemTemp(Ty, "coerce");
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00002404
2405 // Copy from the incoming argument pointer to the temporary with the
2406 // appropriate alignment.
2407 //
2408 // FIXME: We should have a common utility for generating an aggregate
2409 // copy.
Ken Dyck705ba072011-01-19 01:58:38 +00002410 CharUnits Size = getContext().getTypeSizeInChars(Ty);
John McCall7f416cc2015-09-08 08:05:57 +00002411 auto SizeVal = llvm::ConstantInt::get(IntPtrTy, Size.getQuantity());
2412 Address Dst = Builder.CreateBitCast(AlignedTemp, Int8PtrTy);
2413 Address Src = Builder.CreateBitCast(ParamAddr, Int8PtrTy);
2414 Builder.CreateMemCpy(Dst, Src, SizeVal, false);
Daniel Dunbar7b7c2932010-09-16 20:42:02 +00002415 V = AlignedTemp;
2416 }
John McCall7f416cc2015-09-08 08:05:57 +00002417 ArgVals.push_back(ParamValue::forIndirect(V));
Daniel Dunbar747865a2009-02-05 09:16:39 +00002418 } else {
2419 // Load scalar value from indirect argument.
John McCall7f416cc2015-09-08 08:05:57 +00002420 llvm::Value *V =
Stephen Kellyf2ceec42018-08-09 21:08:08 +00002421 EmitLoadOfScalar(ParamAddr, false, Ty, Arg->getBeginLoc());
John McCalla738c252011-03-09 04:27:21 +00002422
2423 if (isPromoted)
2424 V = emitArgumentDemotion(*this, Arg, V);
John McCall7f416cc2015-09-08 08:05:57 +00002425 ArgVals.push_back(ParamValue::forDirect(V));
Daniel Dunbar747865a2009-02-05 09:16:39 +00002426 }
Daniel Dunbar747865a2009-02-05 09:16:39 +00002427 break;
2428 }
Anton Korobeynikov18adbf52009-06-06 09:36:29 +00002429
2430 case ABIArgInfo::Extend:
Daniel Dunbar67dace892009-02-03 06:17:37 +00002431 case ABIArgInfo::Direct: {
Akira Hatanaka18334dd2012-01-09 19:08:06 +00002432
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002433 // If we have the trivial case, handle it with no muss and fuss.
2434 if (!isa<llvm::StructType>(ArgI.getCoerceToType()) &&
Volodymyr Sapsai22b00ec2017-12-21 20:52:59 +00002435 ArgI.getCoerceToType() == ConvertType(Ty) &&
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002436 ArgI.getDirectOffset() == 0) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002437 assert(NumIRArgs == 1);
John McCall12f23522016-04-04 18:33:08 +00002438 llvm::Value *V = FnArgs[FirstIRArg];
2439 auto AI = cast<llvm::Argument>(V);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002440
Hal Finkel48d53e22014-07-19 01:41:07 +00002441 if (const ParmVarDecl *PVD = dyn_cast<ParmVarDecl>(Arg)) {
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00002442 if (getNonNullAttr(CurCodeDecl, PVD, PVD->getType(),
Manoj Guptada08f6a2018-07-19 00:44:52 +00002443 PVD->getFunctionScopeIndex()) &&
2444 !CGM.getCodeGenOpts().NullPointerIsValid)
Reid Klecknercdd26792017-04-18 23:50:03 +00002445 AI->addAttr(llvm::Attribute::NonNull);
Hal Finkel82504f02014-07-11 17:35:21 +00002446
Hal Finkel48d53e22014-07-19 01:41:07 +00002447 QualType OTy = PVD->getOriginalType();
2448 if (const auto *ArrTy =
2449 getContext().getAsConstantArrayType(OTy)) {
2450 // A C99 array parameter declaration with the static keyword also
2451 // indicates dereferenceability, and if the size is constant we can
2452 // use the dereferenceable attribute (which requires the size in
2453 // bytes).
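          // Illustrative example: for `void f(int a[static 4])` the argument
          // is expected to get `dereferenceable(16)` when sizeof(int) == 4,
          // since the caller must pass a pointer to at least four ints.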
Hal Finkel16e394a2014-07-19 02:13:40 +00002454 if (ArrTy->getSizeModifier() == ArrayType::Static) {
Hal Finkel48d53e22014-07-19 01:41:07 +00002455 QualType ETy = ArrTy->getElementType();
2456 uint64_t ArrSize = ArrTy->getSize().getZExtValue();
2457 if (!ETy->isIncompleteType() && ETy->isConstantSizeType() &&
2458 ArrSize) {
2459 llvm::AttrBuilder Attrs;
2460 Attrs.addDereferenceableAttr(
2461 getContext().getTypeSizeInChars(ETy).getQuantity()*ArrSize);
Reid Kleckner9d16fa02017-04-19 17:28:52 +00002462 AI->addAttrs(Attrs);
Manoj Guptada08f6a2018-07-19 00:44:52 +00002463 } else if (getContext().getTargetAddressSpace(ETy) == 0 &&
2464 !CGM.getCodeGenOpts().NullPointerIsValid) {
Reid Klecknercdd26792017-04-18 23:50:03 +00002465 AI->addAttr(llvm::Attribute::NonNull);
Hal Finkel48d53e22014-07-19 01:41:07 +00002466 }
2467 }
2468 } else if (const auto *ArrTy =
2469 getContext().getAsVariableArrayType(OTy)) {
2470 // For C99 VLAs with the static keyword, we don't know the size so
2471 // we can't use the dereferenceable attribute, but in addrspace(0)
2472 // we know that it must be nonnull.
2473 if (ArrTy->getSizeModifier() == VariableArrayType::Static &&
Manoj Guptada08f6a2018-07-19 00:44:52 +00002474 !getContext().getTargetAddressSpace(ArrTy->getElementType()) &&
2475 !CGM.getCodeGenOpts().NullPointerIsValid)
Reid Klecknercdd26792017-04-18 23:50:03 +00002476 AI->addAttr(llvm::Attribute::NonNull);
Hal Finkel48d53e22014-07-19 01:41:07 +00002477 }
Hal Finkel1b0d24e2014-10-02 21:21:25 +00002478
2479 const auto *AVAttr = PVD->getAttr<AlignValueAttr>();
2480 if (!AVAttr)
2481 if (const auto *TOTy = dyn_cast<TypedefType>(OTy))
2482 AVAttr = TOTy->getDecl()->getAttr<AlignValueAttr>();
Roman Lebedevbd1c0872019-01-15 09:44:25 +00002483 if (AVAttr && !SanOpts.has(SanitizerKind::Alignment)) {
2484 // If alignment-assumption sanitizer is enabled, we do *not* add
2485 // alignment attribute here, but emit normal alignment assumption,
2486        // so that the UBSAN check can still function.
Hal Finkel1b0d24e2014-10-02 21:21:25 +00002487 llvm::Value *AlignmentValue =
2488 EmitScalarExpr(AVAttr->getAlignment());
2489 llvm::ConstantInt *AlignmentCI =
2490 cast<llvm::ConstantInt>(AlignmentValue);
Roman Lebedev6b2f8202020-01-21 21:18:29 +03002491 AI->addAttrs(llvm::AttrBuilder().addAlignmentAttr(llvm::MaybeAlign(
2492 AlignmentCI->getLimitedValue(llvm::Value::MaximumAlignment))));
Hal Finkel1b0d24e2014-10-02 21:21:25 +00002493 }
Hal Finkel48d53e22014-07-19 01:41:07 +00002494 }
2495
Bill Wendling507c3512012-10-16 05:23:44 +00002496 if (Arg->getType().isRestrictQualified())
Reid Klecknercdd26792017-04-18 23:50:03 +00002497 AI->addAttr(llvm::Attribute::NoAlias);
John McCall39ec71f2010-03-27 00:47:27 +00002498
John McCall12f23522016-04-04 18:33:08 +00002499 // LLVM expects swifterror parameters to be used in very restricted
2500 // ways. Copy the value into a less-restricted temporary.
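      // (Sketch of the constraint: in LLVM IR a swifterror value may only be
      // loaded, stored, or passed along as a swifterror call argument, hence
      // the shadow copy into an ordinary temporary here.)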
2501 if (FI.getExtParameterInfo(ArgNo).getABI()
2502 == ParameterABI::SwiftErrorResult) {
2503 QualType pointeeTy = Ty->getPointeeType();
2504 assert(pointeeTy->isPointerType());
2505 Address temp =
2506 CreateMemTemp(pointeeTy, getPointerAlign(), "swifterror.temp");
2507 Address arg = Address(V, getContext().getTypeAlignInChars(pointeeTy));
2508 llvm::Value *incomingErrorValue = Builder.CreateLoad(arg);
2509 Builder.CreateStore(incomingErrorValue, temp);
2510 V = temp.getPointer();
2511
2512 // Push a cleanup to copy the value back at the end of the function.
2513 // The convention does not guarantee that the value will be written
2514 // back if the function exits with an unwind exception.
2515 EHStack.pushCleanup<CopyBackSwiftError>(NormalCleanup, temp, arg);
2516 }
2517
Chris Lattner7369c142011-07-20 06:29:00 +00002518 // Ensure the argument is the correct type.
2519 if (V->getType() != ArgI.getCoerceToType())
2520 V = Builder.CreateBitCast(V, ArgI.getCoerceToType());
2521
John McCalla738c252011-03-09 04:27:21 +00002522 if (isPromoted)
2523 V = emitArgumentDemotion(*this, Arg, V);
Rafael Espindola8778c282012-11-29 16:09:03 +00002524
2525 // Because of merging of function types from multiple decls it is
2526 // possible for the type of an argument to not match the corresponding
2527 // type in the function type. Since we are codegening the callee
2528 // in here, add a cast to the argument type.
2529 llvm::Type *LTy = ConvertType(Arg->getType());
2530 if (V->getType() != LTy)
2531 V = Builder.CreateBitCast(V, LTy);
2532
John McCall7f416cc2015-09-08 08:05:57 +00002533 ArgVals.push_back(ParamValue::forDirect(V));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002534 break;
Daniel Dunbard5f1f552009-02-10 00:06:49 +00002535 }
Mike Stump11289f42009-09-09 15:08:12 +00002536
Volodymyr Sapsai22b00ec2017-12-21 20:52:59 +00002537 Address Alloca = CreateMemTemp(Ty, getContext().getDeclAlign(Arg),
2538 Arg->getName());
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002539
John McCall7f416cc2015-09-08 08:05:57 +00002540 // Pointer to store into.
2541 Address Ptr = emitAddressAtOffset(*this, Alloca, ArgI);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002542
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00002543 // Fast-isel and the optimizer generally like scalar values better than
2544 // FCAs, so we flatten them if this is safe to do for this argument.
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002545 llvm::StructType *STy = dyn_cast<llvm::StructType>(ArgI.getCoerceToType());
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00002546 if (ArgI.isDirect() && ArgI.getCanBeFlattened() && STy &&
2547 STy->getNumElements() > 1) {
Micah Villmowdd31ca12012-10-08 16:25:52 +00002548 uint64_t SrcSize = CGM.getDataLayout().getTypeAllocSize(STy);
John McCall7f416cc2015-09-08 08:05:57 +00002549 llvm::Type *DstTy = Ptr.getElementType();
Micah Villmowdd31ca12012-10-08 16:25:52 +00002550 uint64_t DstSize = CGM.getDataLayout().getTypeAllocSize(DstTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002551
John McCall7f416cc2015-09-08 08:05:57 +00002552 Address AddrToStoreInto = Address::invalid();
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002553 if (SrcSize <= DstSize) {
Yaxun Liue9e5c4f2017-06-29 18:47:45 +00002554 AddrToStoreInto = Builder.CreateElementBitCast(Ptr, STy);
Evgeniy Stepanov3fae4ae2012-02-10 09:30:15 +00002555 } else {
John McCall7f416cc2015-09-08 08:05:57 +00002556 AddrToStoreInto =
2557 CreateTempAlloca(STy, Alloca.getAlignment(), "coerce");
Chris Lattner15ec3612010-06-29 00:06:42 +00002558 }
John McCall7f416cc2015-09-08 08:05:57 +00002559
2560 assert(STy->getNumElements() == NumIRArgs);
2561 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
2562 auto AI = FnArgs[FirstIRArg + i];
2563 AI->setName(Arg->getName() + ".coerce" + Twine(i));
James Y Knight751fe282019-02-09 22:22:28 +00002564 Address EltPtr = Builder.CreateStructGEP(AddrToStoreInto, i);
John McCall7f416cc2015-09-08 08:05:57 +00002565 Builder.CreateStore(AI, EltPtr);
2566 }
2567
2568 if (SrcSize > DstSize) {
2569 Builder.CreateMemCpy(Ptr, AddrToStoreInto, DstSize);
2570 }
2571
Chris Lattner15ec3612010-06-29 00:06:42 +00002572 } else {
2573 // Simple case, just do a coerced store of the argument into the alloca.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002574 assert(NumIRArgs == 1);
2575 auto AI = FnArgs[FirstIRArg];
Chris Lattner9e748e92010-06-29 00:14:52 +00002576 AI->setName(Arg->getName() + ".coerce");
Rui Ueyama49a3ad22019-07-16 04:46:31 +00002577 CreateCoercedStore(AI, Ptr, /*DstIsVolatile=*/false, *this);
Chris Lattner15ec3612010-06-29 00:06:42 +00002578 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002579
Daniel Dunbar2f219b02009-02-03 19:12:28 +00002580 // Match to what EmitParmDecl is expecting for this type.
Volodymyr Sapsai22b00ec2017-12-21 20:52:59 +00002581 if (CodeGenFunction::hasScalarEvaluationKind(Ty)) {
John McCall7f416cc2015-09-08 08:05:57 +00002582 llvm::Value *V =
Stephen Kellyf2ceec42018-08-09 21:08:08 +00002583 EmitLoadOfScalar(Alloca, false, Ty, Arg->getBeginLoc());
John McCalla738c252011-03-09 04:27:21 +00002584 if (isPromoted)
2585 V = emitArgumentDemotion(*this, Arg, V);
John McCall7f416cc2015-09-08 08:05:57 +00002586 ArgVals.push_back(ParamValue::forDirect(V));
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002587 } else {
John McCall7f416cc2015-09-08 08:05:57 +00002588 ArgVals.push_back(ParamValue::forIndirect(Alloca));
Daniel Dunbar6e3b7df2009-02-04 07:22:24 +00002589 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002590 break;
Daniel Dunbar2f219b02009-02-03 19:12:28 +00002591 }
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002592
John McCallf26e73d2016-03-11 04:30:43 +00002593 case ABIArgInfo::CoerceAndExpand: {
2594 // Reconstruct into a temporary.
2595 Address alloca = CreateMemTemp(Ty, getContext().getDeclAlign(Arg));
2596 ArgVals.push_back(ParamValue::forIndirect(alloca));
2597
2598 auto coercionType = ArgI.getCoerceAndExpandType();
2599 alloca = Builder.CreateElementBitCast(alloca, coercionType);
John McCallf26e73d2016-03-11 04:30:43 +00002600
2601 unsigned argIndex = FirstIRArg;
2602 for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
2603 llvm::Type *eltType = coercionType->getElementType(i);
2604 if (ABIArgInfo::isPaddingForCoerceAndExpand(eltType))
2605 continue;
2606
James Y Knight751fe282019-02-09 22:22:28 +00002607 auto eltAddr = Builder.CreateStructGEP(alloca, i);
John McCallf26e73d2016-03-11 04:30:43 +00002608 auto elt = FnArgs[argIndex++];
2609 Builder.CreateStore(elt, eltAddr);
2610 }
2611 assert(argIndex == FirstIRArg + NumIRArgs);
2612 break;
2613 }
2614
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002615 case ABIArgInfo::Expand: {
2616 // If this structure was expanded into multiple arguments then
2617 // we need to create a temporary and reconstruct it from the
2618 // arguments.
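      // (Illustrative: a parameter of type `struct { int i; float f; }` may
      // arrive as separate i32 and float IR arguments and is reassembled here
      // into the temporary; the exact split is ABI-dependent.)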
John McCall7f416cc2015-09-08 08:05:57 +00002619 Address Alloca = CreateMemTemp(Ty, getContext().getDeclAlign(Arg));
2620 LValue LV = MakeAddrLValue(Alloca, Ty);
2621 ArgVals.push_back(ParamValue::forIndirect(Alloca));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002622
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002623 auto FnArgIter = FnArgs.begin() + FirstIRArg;
2624 ExpandTypeFromArgs(Ty, LV, FnArgIter);
2625 assert(FnArgIter == FnArgs.begin() + FirstIRArg + NumIRArgs);
2626 for (unsigned i = 0, e = NumIRArgs; i != e; ++i) {
2627 auto AI = FnArgs[FirstIRArg + i];
2628 AI->setName(Arg->getName() + "." + Twine(i));
2629 }
2630 break;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002631 }
2632
2633 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002634 assert(NumIRArgs == 0);
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002635 // Initialize the local variable appropriately.
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002636 if (!hasScalarEvaluationKind(Ty)) {
John McCall7f416cc2015-09-08 08:05:57 +00002637 ArgVals.push_back(ParamValue::forIndirect(CreateMemTemp(Ty)));
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002638 } else {
2639 llvm::Value *U = llvm::UndefValue::get(ConvertType(Arg->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00002640 ArgVals.push_back(ParamValue::forDirect(U));
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002641 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00002642 break;
Daniel Dunbard3674e62008-09-11 01:48:57 +00002643 }
Daniel Dunbar613855c2008-09-09 23:27:19 +00002644 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002645
Reid Kleckner739756c2013-12-04 19:23:12 +00002646 if (getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
2647 for (int I = Args.size() - 1; I >= 0; --I)
John McCall7f416cc2015-09-08 08:05:57 +00002648 EmitParmDecl(*Args[I], ArgVals[I], I + 1);
Reid Kleckner739756c2013-12-04 19:23:12 +00002649 } else {
2650 for (unsigned I = 0, E = Args.size(); I != E; ++I)
John McCall7f416cc2015-09-08 08:05:57 +00002651 EmitParmDecl(*Args[I], ArgVals[I], I + 1);
Reid Kleckner739756c2013-12-04 19:23:12 +00002652 }
Daniel Dunbar613855c2008-09-09 23:27:19 +00002653}
2654
John McCallffa2c1a2012-01-29 07:46:59 +00002655static void eraseUnusedBitCasts(llvm::Instruction *insn) {
2656 while (insn->use_empty()) {
2657 llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(insn);
2658 if (!bitcast) return;
2659
2660 // This is "safe" because we would have used a ConstantExpr otherwise.
2661 insn = cast<llvm::Instruction>(bitcast->getOperand(0));
2662 bitcast->eraseFromParent();
2663 }
2664}
2665
John McCall31168b02011-06-15 23:02:42 +00002666/// Try to emit a fused autorelease of a return result.
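/// (For instance, an `objc_retain` of the result immediately before the
/// return can be fused with the pending autorelease into a single
/// `objc_retainAutoreleaseReturnValue` call; a sketch of the intent, subject
/// to the pattern checks below.)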
2667static llvm::Value *tryEmitFusedAutoreleaseOfResult(CodeGenFunction &CGF,
2668 llvm::Value *result) {
2669  // We must be emitting immediately after the cast.
2670 llvm::BasicBlock *BB = CGF.Builder.GetInsertBlock();
Craig Topper8a13c412014-05-21 05:09:00 +00002671 if (BB->empty()) return nullptr;
2672 if (&BB->back() != result) return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002673
Chris Lattner2192fe52011-07-18 04:24:23 +00002674 llvm::Type *resultType = result->getType();
John McCall31168b02011-06-15 23:02:42 +00002675
2676 // result is in a BasicBlock and is therefore an Instruction.
2677 llvm::Instruction *generator = cast<llvm::Instruction>(result);
2678
Justin Bogner882f8612016-08-18 21:46:54 +00002679 SmallVector<llvm::Instruction *, 4> InstsToKill;
John McCall31168b02011-06-15 23:02:42 +00002680
2681 // Look for:
2682 // %generator = bitcast %type1* %generator2 to %type2*
2683 while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(generator)) {
2684 // We would have emitted this as a constant if the operand weren't
2685 // an Instruction.
2686 generator = cast<llvm::Instruction>(bitcast->getOperand(0));
2687
2688 // Require the generator to be immediately followed by the cast.
2689 if (generator->getNextNode() != bitcast)
Craig Topper8a13c412014-05-21 05:09:00 +00002690 return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002691
Justin Bogner882f8612016-08-18 21:46:54 +00002692 InstsToKill.push_back(bitcast);
John McCall31168b02011-06-15 23:02:42 +00002693 }
2694
2695 // Look for:
2696 // %generator = call i8* @objc_retain(i8* %originalResult)
2697 // or
2698 // %generator = call i8* @objc_retainAutoreleasedReturnValue(i8* %originalResult)
2699 llvm::CallInst *call = dyn_cast<llvm::CallInst>(generator);
Craig Topper8a13c412014-05-21 05:09:00 +00002700 if (!call) return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002701
2702 bool doRetainAutorelease;
2703
Craig Toppera58b62b2020-04-27 20:15:59 -07002704 if (call->getCalledOperand() == CGF.CGM.getObjCEntrypoints().objc_retain) {
John McCall31168b02011-06-15 23:02:42 +00002705 doRetainAutorelease = true;
Craig Toppera58b62b2020-04-27 20:15:59 -07002706 } else if (call->getCalledOperand() ==
2707 CGF.CGM.getObjCEntrypoints().objc_retainAutoreleasedReturnValue) {
John McCall31168b02011-06-15 23:02:42 +00002708 doRetainAutorelease = false;
2709
John McCallcfa4e9b2012-09-07 23:30:50 +00002710 // If we emitted an assembly marker for this call (and the
2711    // ObjCEntrypoints field should have been set if so), go looking
2712 // for that call. If we can't find it, we can't do this
2713 // optimization. But it should always be the immediately previous
2714 // instruction, unless we needed bitcasts around the call.
John McCallb04ecb72015-10-21 18:06:43 +00002715 if (CGF.CGM.getObjCEntrypoints().retainAutoreleasedReturnValueMarker) {
John McCallcfa4e9b2012-09-07 23:30:50 +00002716 llvm::Instruction *prev = call->getPrevNode();
2717 assert(prev);
2718 if (isa<llvm::BitCastInst>(prev)) {
2719 prev = prev->getPrevNode();
2720 assert(prev);
2721 }
2722 assert(isa<llvm::CallInst>(prev));
Craig Toppera58b62b2020-04-27 20:15:59 -07002723 assert(cast<llvm::CallInst>(prev)->getCalledOperand() ==
2724 CGF.CGM.getObjCEntrypoints().retainAutoreleasedReturnValueMarker);
Justin Bogner882f8612016-08-18 21:46:54 +00002725 InstsToKill.push_back(prev);
John McCallcfa4e9b2012-09-07 23:30:50 +00002726 }
John McCall31168b02011-06-15 23:02:42 +00002727 } else {
Craig Topper8a13c412014-05-21 05:09:00 +00002728 return nullptr;
John McCall31168b02011-06-15 23:02:42 +00002729 }
2730
2731 result = call->getArgOperand(0);
Justin Bogner882f8612016-08-18 21:46:54 +00002732 InstsToKill.push_back(call);
John McCall31168b02011-06-15 23:02:42 +00002733
2734 // Keep killing bitcasts, for sanity. Note that we no longer care
2735 // about precise ordering as long as there's exactly one use.
2736 while (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(result)) {
2737 if (!bitcast->hasOneUse()) break;
Justin Bogner882f8612016-08-18 21:46:54 +00002738 InstsToKill.push_back(bitcast);
John McCall31168b02011-06-15 23:02:42 +00002739 result = bitcast->getOperand(0);
2740 }
2741
2742 // Delete all the unnecessary instructions, from latest to earliest.
Justin Bogner882f8612016-08-18 21:46:54 +00002743 for (auto *I : InstsToKill)
Saleem Abdulrasoolbe25c482016-08-18 21:40:06 +00002744 I->eraseFromParent();
John McCall31168b02011-06-15 23:02:42 +00002745
2746 // Do the fused retain/autorelease if we were asked to.
2747 if (doRetainAutorelease)
2748 result = CGF.EmitARCRetainAutoreleaseReturnValue(result);
2749
2750 // Cast back to the result type.
2751 return CGF.Builder.CreateBitCast(result, resultType);
2752}
2753
John McCallffa2c1a2012-01-29 07:46:59 +00002754/// If this is a +1 of the value of an immutable 'self', remove it.
2755static llvm::Value *tryRemoveRetainOfSelf(CodeGenFunction &CGF,
2756 llvm::Value *result) {
2757 // This is only applicable to a method with an immutable 'self'.
John McCallff755cd2012-07-31 00:33:55 +00002758 const ObjCMethodDecl *method =
2759 dyn_cast_or_null<ObjCMethodDecl>(CGF.CurCodeDecl);
Craig Topper8a13c412014-05-21 05:09:00 +00002760 if (!method) return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002761 const VarDecl *self = method->getSelfDecl();
Craig Topper8a13c412014-05-21 05:09:00 +00002762 if (!self->getType().isConstQualified()) return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002763
2764 // Look for a retain call.
2765 llvm::CallInst *retainCall =
2766 dyn_cast<llvm::CallInst>(result->stripPointerCasts());
Craig Toppera58b62b2020-04-27 20:15:59 -07002767 if (!retainCall || retainCall->getCalledOperand() !=
2768 CGF.CGM.getObjCEntrypoints().objc_retain)
Craig Topper8a13c412014-05-21 05:09:00 +00002769 return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002770
2771 // Look for an ordinary load of 'self'.
2772 llvm::Value *retainedValue = retainCall->getArgOperand(0);
2773 llvm::LoadInst *load =
2774 dyn_cast<llvm::LoadInst>(retainedValue->stripPointerCasts());
Fangrui Song6907ce22018-07-30 19:24:48 +00002775 if (!load || load->isAtomic() || load->isVolatile() ||
John McCall7f416cc2015-09-08 08:05:57 +00002776 load->getPointerOperand() != CGF.GetAddrOfLocalVar(self).getPointer())
Craig Topper8a13c412014-05-21 05:09:00 +00002777 return nullptr;
John McCallffa2c1a2012-01-29 07:46:59 +00002778
2779 // Okay! Burn it all down. This relies for correctness on the
2780 // assumption that the retain is emitted as part of the return and
2781 // that thereafter everything is used "linearly".
2782 llvm::Type *resultType = result->getType();
2783 eraseUnusedBitCasts(cast<llvm::Instruction>(result));
2784 assert(retainCall->use_empty());
2785 retainCall->eraseFromParent();
2786 eraseUnusedBitCasts(cast<llvm::Instruction>(retainedValue));
2787
2788 return CGF.Builder.CreateBitCast(load, resultType);
2789}
2790
John McCall31168b02011-06-15 23:02:42 +00002791/// Emit an ARC autorelease of the result of a function.
John McCallffa2c1a2012-01-29 07:46:59 +00002792///
2793/// \return the value to actually return from the function
John McCall31168b02011-06-15 23:02:42 +00002794static llvm::Value *emitAutoreleaseOfResult(CodeGenFunction &CGF,
2795 llvm::Value *result) {
John McCallffa2c1a2012-01-29 07:46:59 +00002796 // If we're returning 'self', kill the initial retain. This is a
2797 // heuristic attempt to "encourage correctness" in the really unfortunate
2798 // case where we have a return of self during a dealloc and we desperately
2799 // need to avoid the possible autorelease.
2800 if (llvm::Value *self = tryRemoveRetainOfSelf(CGF, result))
2801 return self;
2802
John McCall31168b02011-06-15 23:02:42 +00002803 // At -O0, try to emit a fused retain/autorelease.
2804 if (CGF.shouldUseFusedARCCalls())
2805 if (llvm::Value *fused = tryEmitFusedAutoreleaseOfResult(CGF, result))
2806 return fused;
2807
2808 return CGF.EmitARCAutoreleaseReturnValue(result);
2809}
2810
John McCall6e1c0122012-01-29 02:35:02 +00002811/// Heuristically search for a dominating store to the return-value slot.
2812static llvm::StoreInst *findDominatingStoreToReturnValue(CodeGenFunction &CGF) {
Jakub Kuderskif50ab0f2015-09-08 10:36:42 +00002813  // Check whether a User is a store whose pointer operand is the ReturnValue.
2814 // We are looking for stores to the ReturnValue, not for stores of the
2815 // ReturnValue to some other location.
2816 auto GetStoreIfValid = [&CGF](llvm::User *U) -> llvm::StoreInst * {
2817 auto *SI = dyn_cast<llvm::StoreInst>(U);
2818 if (!SI || SI->getPointerOperand() != CGF.ReturnValue.getPointer())
2819 return nullptr;
2820 // These aren't actually possible for non-coerced returns, and we
2821 // only care about non-coerced returns on this code path.
2822 assert(!SI->isAtomic() && !SI->isVolatile());
2823 return SI;
2824 };
John McCall6e1c0122012-01-29 02:35:02 +00002825 // If there are multiple uses of the return-value slot, just check
2826 // for something immediately preceding the IP. Sometimes this can
2827 // happen with how we generate implicit-returns; it can also happen
2828 // with noreturn cleanups.
John McCall7f416cc2015-09-08 08:05:57 +00002829 if (!CGF.ReturnValue.getPointer()->hasOneUse()) {
John McCall6e1c0122012-01-29 02:35:02 +00002830 llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock();
Craig Topper8a13c412014-05-21 05:09:00 +00002831 if (IP->empty()) return nullptr;
David Majnemerdc012fa2015-04-22 21:38:15 +00002832 llvm::Instruction *I = &IP->back();
2833
2834 // Skip lifetime markers
2835 for (llvm::BasicBlock::reverse_iterator II = IP->rbegin(),
2836 IE = IP->rend();
2837 II != IE; ++II) {
2838 if (llvm::IntrinsicInst *Intrinsic =
2839 dyn_cast<llvm::IntrinsicInst>(&*II)) {
2840 if (Intrinsic->getIntrinsicID() == llvm::Intrinsic::lifetime_end) {
2841 const llvm::Value *CastAddr = Intrinsic->getArgOperand(1);
2842 ++II;
Alexey Samsonov10544202015-06-12 21:05:32 +00002843 if (II == IE)
2844 break;
2845 if (isa<llvm::BitCastInst>(&*II) && (CastAddr == &*II))
2846 continue;
David Majnemerdc012fa2015-04-22 21:38:15 +00002847 }
2848 }
2849 I = &*II;
2850 break;
2851 }
2852
Jakub Kuderskif50ab0f2015-09-08 10:36:42 +00002853 return GetStoreIfValid(I);
John McCall6e1c0122012-01-29 02:35:02 +00002854 }
2855
2856 llvm::StoreInst *store =
Jakub Kuderskif50ab0f2015-09-08 10:36:42 +00002857 GetStoreIfValid(CGF.ReturnValue.getPointer()->user_back());
Craig Topper8a13c412014-05-21 05:09:00 +00002858 if (!store) return nullptr;
John McCall6e1c0122012-01-29 02:35:02 +00002859
John McCall6e1c0122012-01-29 02:35:02 +00002860  // Now do a quick-and-dirty dominance check: just walk up the
2861 // single-predecessors chain from the current insertion point.
2862 llvm::BasicBlock *StoreBB = store->getParent();
2863 llvm::BasicBlock *IP = CGF.Builder.GetInsertBlock();
2864 while (IP != StoreBB) {
2865 if (!(IP = IP->getSinglePredecessor()))
Craig Topper8a13c412014-05-21 05:09:00 +00002866 return nullptr;
John McCall6e1c0122012-01-29 02:35:02 +00002867 }
2868
2869 // Okay, the store's basic block dominates the insertion point; we
2870 // can do our thing.
2871 return store;
2872}
2873
Adrian Prantl3be10542013-05-02 17:30:20 +00002874void CodeGenFunction::EmitFunctionEpilog(const CGFunctionInfo &FI,
Nick Lewycky2d84e842013-10-02 02:29:49 +00002875 bool EmitRetDbgLoc,
2876 SourceLocation EndLoc) {
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00002877 if (FI.isNoReturn()) {
2878 // Noreturn functions don't return.
2879 EmitUnreachable(EndLoc);
2880 return;
2881 }
2882
Hans Wennborgd71907d2014-09-04 22:16:33 +00002883 if (CurCodeDecl && CurCodeDecl->hasAttr<NakedAttr>()) {
2884 // Naked functions don't have epilogues.
2885 Builder.CreateUnreachable();
2886 return;
2887 }
2888
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002889 // Functions with no result always return void.
John McCall7f416cc2015-09-08 08:05:57 +00002890 if (!ReturnValue.isValid()) {
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002891 Builder.CreateRetVoid();
Chris Lattner726b3d02010-06-26 23:13:19 +00002892 return;
Daniel Dunbara72d4ae2008-09-10 02:41:04 +00002893 }
Daniel Dunbar6696e222010-06-30 21:27:58 +00002894
Dan Gohman481e40c2010-07-20 20:13:52 +00002895 llvm::DebugLoc RetDbgLoc;
Craig Topper8a13c412014-05-21 05:09:00 +00002896 llvm::Value *RV = nullptr;
Chris Lattner726b3d02010-06-26 23:13:19 +00002897 QualType RetTy = FI.getReturnType();
2898 const ABIArgInfo &RetAI = FI.getReturnInfo();
2899
2900 switch (RetAI.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002901 case ABIArgInfo::InAlloca:
Reid Klecknerfab1e892014-02-25 00:59:14 +00002902    // Aggregates get evaluated directly into the destination. Sometimes we
2903 // need to return the sret value in a register, though.
2904 assert(hasAggregateEvaluationKind(RetTy));
2905 if (RetAI.getInAllocaSRet()) {
2906 llvm::Function::arg_iterator EI = CurFn->arg_end();
2907 --EI;
Duncan P. N. Exon Smith9f5260a2015-11-06 23:00:41 +00002908 llvm::Value *ArgStruct = &*EI;
David Blaikie2e804282015-04-05 22:47:07 +00002909 llvm::Value *SRet = Builder.CreateStructGEP(
2910 nullptr, ArgStruct, RetAI.getInAllocaFieldIndex());
John McCall7f416cc2015-09-08 08:05:57 +00002911 RV = Builder.CreateAlignedLoad(SRet, getPointerAlign(), "sret");
Reid Klecknerfab1e892014-02-25 00:59:14 +00002912 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00002913 break;
2914
Daniel Dunbar03816342010-08-21 02:24:36 +00002915 case ABIArgInfo::Indirect: {
Reid Kleckner37abaca2014-05-09 22:46:15 +00002916 auto AI = CurFn->arg_begin();
2917 if (RetAI.isSRetAfterThis())
2918 ++AI;
John McCall47fb9502013-03-07 21:37:08 +00002919 switch (getEvaluationKind(RetTy)) {
2920 case TEK_Complex: {
2921 ComplexPairTy RT =
John McCall7f416cc2015-09-08 08:05:57 +00002922 EmitLoadOfComplex(MakeAddrLValue(ReturnValue, RetTy), EndLoc);
Duncan P. N. Exon Smith9f5260a2015-11-06 23:00:41 +00002923 EmitStoreOfComplex(RT, MakeNaturalAlignAddrLValue(&*AI, RetTy),
John McCall47fb9502013-03-07 21:37:08 +00002924 /*isInit*/ true);
2925 break;
2926 }
2927 case TEK_Aggregate:
Chris Lattner726b3d02010-06-26 23:13:19 +00002928      // Do nothing; aggregates get evaluated directly into the destination.
John McCall47fb9502013-03-07 21:37:08 +00002929 break;
2930 case TEK_Scalar:
2931 EmitStoreOfScalar(Builder.CreateLoad(ReturnValue),
Duncan P. N. Exon Smith9f5260a2015-11-06 23:00:41 +00002932 MakeNaturalAlignAddrLValue(&*AI, RetTy),
John McCall47fb9502013-03-07 21:37:08 +00002933 /*isInit*/ true);
2934 break;
Chris Lattner726b3d02010-06-26 23:13:19 +00002935 }
2936 break;
Daniel Dunbar03816342010-08-21 02:24:36 +00002937 }
Chris Lattner726b3d02010-06-26 23:13:19 +00002938
2939 case ABIArgInfo::Extend:
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002940 case ABIArgInfo::Direct:
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002941 if (RetAI.getCoerceToType() == ConvertType(RetTy) &&
2942 RetAI.getDirectOffset() == 0) {
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002943      // The internal return value temp will always have pointer-to-return-type
2944      // type, so just do a load.
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002945
John McCall6e1c0122012-01-29 02:35:02 +00002946 // If there is a dominating store to ReturnValue, we can elide
2947 // the load, zap the store, and usually zap the alloca.
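      // (Sketch: a `store i32 %x, i32* %retval` followed by the epilogue load
      // collapses into returning %x directly; the dead store is erased and the
      // retval alloca is then typically removed by later cleanups.)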
David Majnemerdc012fa2015-04-22 21:38:15 +00002948 if (llvm::StoreInst *SI =
2949 findDominatingStoreToReturnValue(*this)) {
Adrian Prantl4c9a38a2013-05-30 18:12:23 +00002950 // Reuse the debug location from the store unless there is
2951 // cleanup code to be emitted between the store and return
2952 // instruction.
2953 if (EmitRetDbgLoc && !AutoreleaseResult)
Adrian Prantl3be10542013-05-02 17:30:20 +00002954 RetDbgLoc = SI->getDebugLoc();
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002955 // Get the stored value and nuke the now-dead store.
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002956 RV = SI->getValueOperand();
2957 SI->eraseFromParent();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002958
John McCall6e1c0122012-01-29 02:35:02 +00002959 // Otherwise, we have to do a simple load.
2960 } else {
2961 RV = Builder.CreateLoad(ReturnValue);
Chris Lattner3fcc7902010-06-27 01:06:27 +00002962 }
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002963 } else {
Chris Lattner8a2f3c72010-07-30 04:02:24 +00002964 // If the value is offset in memory, apply the offset now.
John McCall7f416cc2015-09-08 08:05:57 +00002965 Address V = emitAddressAtOffset(*this, ReturnValue, RetAI);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00002966
John McCall7f416cc2015-09-08 08:05:57 +00002967 RV = CreateCoercedLoad(V, RetAI.getCoerceToType(), *this);
Chris Lattner3fcc7902010-06-27 01:06:27 +00002968 }
John McCall31168b02011-06-15 23:02:42 +00002969
2970 // In ARC, end functions that return a retainable type with a call
2971 // to objc_autoreleaseReturnValue.
2972 if (AutoreleaseResult) {
Akira Hatanaka9d8ac612016-02-17 21:09:50 +00002973#ifndef NDEBUG
2974    // Type::isObjCRetainableType has to be called on a QualType that hasn't
2975 // been stripped of the typedefs, so we cannot use RetTy here. Get the
2976 // original return type of FunctionDecl, CurCodeDecl, and BlockDecl from
2977 // CurCodeDecl or BlockInfo.
2978 QualType RT;
2979
2980 if (auto *FD = dyn_cast<FunctionDecl>(CurCodeDecl))
2981 RT = FD->getReturnType();
2982 else if (auto *MD = dyn_cast<ObjCMethodDecl>(CurCodeDecl))
2983 RT = MD->getReturnType();
2984 else if (isa<BlockDecl>(CurCodeDecl))
2985 RT = BlockInfo->BlockExpression->getFunctionType()->getReturnType();
2986 else
2987 llvm_unreachable("Unexpected function/method type");
2988
David Blaikiebbafb8a2012-03-11 07:00:24 +00002989 assert(getLangOpts().ObjCAutoRefCount &&
John McCall31168b02011-06-15 23:02:42 +00002990 !FI.isReturnsRetained() &&
Akira Hatanaka9d8ac612016-02-17 21:09:50 +00002991 RT->isObjCRetainableType());
2992#endif
John McCall31168b02011-06-15 23:02:42 +00002993 RV = emitAutoreleaseOfResult(*this, RV);
2994 }
2995
Chris Lattner726b3d02010-06-26 23:13:19 +00002996 break;
Chris Lattner726b3d02010-06-26 23:13:19 +00002997
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00002998 case ABIArgInfo::Ignore:
Chris Lattner726b3d02010-06-26 23:13:19 +00002999 break;
3000
John McCallf26e73d2016-03-11 04:30:43 +00003001 case ABIArgInfo::CoerceAndExpand: {
3002 auto coercionType = RetAI.getCoerceAndExpandType();
John McCallf26e73d2016-03-11 04:30:43 +00003003
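    // Sketch of what happens below (hypothetical lowering): if the return
    // value was coerced to { i32, [4 x i8], float }, where the i8 array is
    // ABI padding, we load the i32 and float elements individually, skip the
    // padding element, and rebuild the unpadded aggregate { i32, float } as RV.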
3004 // Load all of the coerced elements out into results.
3005 llvm::SmallVector<llvm::Value*, 4> results;
3006 Address addr = Builder.CreateElementBitCast(ReturnValue, coercionType);
3007 for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
3008 auto coercedEltType = coercionType->getElementType(i);
3009 if (ABIArgInfo::isPaddingForCoerceAndExpand(coercedEltType))
3010 continue;
3011
James Y Knight751fe282019-02-09 22:22:28 +00003012 auto eltAddr = Builder.CreateStructGEP(addr, i);
John McCallf26e73d2016-03-11 04:30:43 +00003013 auto elt = Builder.CreateLoad(eltAddr);
3014 results.push_back(elt);
3015 }
3016
3017 // If we have one result, it's the single direct result type.
3018 if (results.size() == 1) {
3019 RV = results[0];
3020
3021 // Otherwise, we need to make a first-class aggregate.
3022 } else {
3023 // Construct a return type that lacks padding elements.
3024 llvm::Type *returnType = RetAI.getUnpaddedCoerceAndExpandType();
3025
3026 RV = llvm::UndefValue::get(returnType);
3027 for (unsigned i = 0, e = results.size(); i != e; ++i) {
3028 RV = Builder.CreateInsertValue(RV, results[i], i);
3029 }
3030 }
3031 break;
3032 }
3033
Chris Lattner726b3d02010-06-26 23:13:19 +00003034 case ABIArgInfo::Expand:
David Blaikie83d382b2011-09-23 05:06:16 +00003035 llvm_unreachable("Invalid ABI kind for return argument");
Chris Lattner726b3d02010-06-26 23:13:19 +00003036 }
3037
Alexey Samsonovde443c52014-08-13 00:26:40 +00003038 llvm::Instruction *Ret;
3039 if (RV) {
Vedant Kumarc34d3432017-06-23 21:32:38 +00003040 EmitReturnValueCheck(RV);
Alexey Samsonovde443c52014-08-13 00:26:40 +00003041 Ret = Builder.CreateRet(RV);
3042 } else {
3043 Ret = Builder.CreateRetVoid();
3044 }
3045
Duncan P. N. Exon Smith2809cc72015-03-30 20:01:41 +00003046 if (RetDbgLoc)
Benjamin Kramer03278662015-02-07 13:15:54 +00003047 Ret->setDebugLoc(std::move(RetDbgLoc));
Daniel Dunbar613855c2008-09-09 23:27:19 +00003048}
3049
Vedant Kumarc34d3432017-06-23 21:32:38 +00003050void CodeGenFunction::EmitReturnValueCheck(llvm::Value *RV) {
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003051 // A current decl may not be available when emitting vtable thunks.
3052 if (!CurCodeDecl)
3053 return;
3054
Vedant Kumar65f07852020-02-06 10:17:36 -08003055 // If the return block isn't reachable, neither is this check, so don't emit
3056 // it.
3057 if (ReturnBlock.isValid() && ReturnBlock.getBlock()->use_empty())
3058 return;
3059
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003060 ReturnsNonNullAttr *RetNNAttr = nullptr;
3061 if (SanOpts.has(SanitizerKind::ReturnsNonnullAttribute))
3062 RetNNAttr = CurCodeDecl->getAttr<ReturnsNonNullAttr>();
3063
3064 if (!RetNNAttr && !requiresReturnValueNullabilityCheck())
3065 return;
3066
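  // Illustrative (hypothetical) declarations that reach this point, assuming
  // the corresponding sanitizers (-fsanitize=returns-nonnull-attribute and
  // -fsanitize=nullability-return respectively) are enabled:
  //
  //   __attribute__((returns_nonnull)) int *make();   // ReturnsNonNullAttr
  //   int *_Nonnull make_nonnull();                    // nullability annotation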
3067 // Prefer the returns_nonnull attribute if it's present.
3068 SourceLocation AttrLoc;
3069 SanitizerMask CheckKind;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003070 SanitizerHandler Handler;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003071 if (RetNNAttr) {
3072 assert(!requiresReturnValueNullabilityCheck() &&
3073 "Cannot check nullability and the nonnull attribute");
3074 AttrLoc = RetNNAttr->getLocation();
3075 CheckKind = SanitizerKind::ReturnsNonnullAttribute;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003076 Handler = SanitizerHandler::NonnullReturn;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003077 } else {
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003078 if (auto *DD = dyn_cast<DeclaratorDecl>(CurCodeDecl))
3079 if (auto *TSI = DD->getTypeSourceInfo())
Vedant Kumar8b81ebf2020-02-10 13:10:47 -08003080 if (auto FTL = TSI->getTypeLoc().getAsAdjusted<FunctionTypeLoc>())
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003081 AttrLoc = FTL.getReturnLoc().findNullabilityLoc();
3082 CheckKind = SanitizerKind::NullabilityReturn;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003083 Handler = SanitizerHandler::NullabilityReturn;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003084 }
3085
3086 SanitizerScope SanScope(this);
3087
Vedant Kumarc34d3432017-06-23 21:32:38 +00003088 // Make sure the "return" source location is valid. If we're checking a
3089 // nullability annotation, make sure the preconditions for the check are met.
3090 llvm::BasicBlock *Check = createBasicBlock("nullcheck");
3091 llvm::BasicBlock *NoCheck = createBasicBlock("no.nullcheck");
3092 llvm::Value *SLocPtr = Builder.CreateLoad(ReturnLocation, "return.sloc.load");
3093 llvm::Value *CanNullCheck = Builder.CreateIsNotNull(SLocPtr);
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003094 if (requiresReturnValueNullabilityCheck())
Vedant Kumarc34d3432017-06-23 21:32:38 +00003095 CanNullCheck =
3096 Builder.CreateAnd(CanNullCheck, RetValNullabilityPrecondition);
3097 Builder.CreateCondBr(CanNullCheck, Check, NoCheck);
3098 EmitBlock(Check);
3099
3100 // Now do the null check.
3101 llvm::Value *Cond = Builder.CreateIsNotNull(RV);
3102 llvm::Constant *StaticData[] = {EmitCheckSourceLocation(AttrLoc)};
3103 llvm::Value *DynamicData[] = {SLocPtr};
3104 EmitCheck(std::make_pair(Cond, CheckKind), Handler, StaticData, DynamicData);
3105
3106 EmitBlock(NoCheck);
3107
3108#ifndef NDEBUG
3109 // The return location should not be used after the check has been emitted.
3110 ReturnLocation = Address::invalid();
3111#endif
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003112}
3113
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003114static bool isInAllocaArgument(CGCXXABI &ABI, QualType type) {
3115 const CXXRecordDecl *RD = type->getAsCXXRecordDecl();
3116 return RD && ABI.getRecordArgABI(RD) == CGCXXABI::RAA_DirectInMemory;
3117}
3118
John McCall7f416cc2015-09-08 08:05:57 +00003119static AggValueSlot createPlaceholderSlot(CodeGenFunction &CGF,
3120 QualType Ty) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003121 // FIXME: Generate IR in one pass, rather than going back and fixing up these
3122 // placeholders.
3123 llvm::Type *IRTy = CGF.ConvertTypeForMem(Ty);
Peter Collingbourneb367c562016-11-28 22:30:21 +00003124 llvm::Type *IRPtrTy = IRTy->getPointerTo();
3125 llvm::Value *Placeholder = llvm::UndefValue::get(IRPtrTy->getPointerTo());
John McCall7f416cc2015-09-08 08:05:57 +00003126
3127 // FIXME: When we generate this IR in one pass, we shouldn't need
3128 // this win32-specific alignment hack.
3129 CharUnits Align = CharUnits::fromQuantity(4);
Peter Collingbourneb367c562016-11-28 22:30:21 +00003130 Placeholder = CGF.Builder.CreateAlignedLoad(IRPtrTy, Placeholder, Align);
John McCall7f416cc2015-09-08 08:05:57 +00003131
3132 return AggValueSlot::forAddr(Address(Placeholder, Align),
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003133 Ty.getQualifiers(),
3134 AggValueSlot::IsNotDestructed,
3135 AggValueSlot::DoesNotNeedGCBarriers,
Richard Smithe78fac52018-04-05 20:52:58 +00003136 AggValueSlot::IsNotAliased,
3137 AggValueSlot::DoesNotOverlap);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003138}
3139
John McCall32ea9692011-03-11 20:59:21 +00003140void CodeGenFunction::EmitDelegateCallArg(CallArgList &args,
Nick Lewycky2d84e842013-10-02 02:29:49 +00003141 const VarDecl *param,
3142 SourceLocation loc) {
John McCall23f66262010-05-26 22:34:26 +00003143 // StartFunction converted the ABI-lowered parameter(s) into a
3144 // local alloca. We need to turn that into an r-value suitable
3145 // for EmitCall.
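  // This path is taken when the current function transparently forwards its
  // own parameters to another function, e.g. a constructor variant delegating
  // to its sibling variant or a lambda's static invoker forwarding to the call
  // operator (illustrative examples; the exact callers live elsewhere).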
John McCall7f416cc2015-09-08 08:05:57 +00003146 Address local = GetAddrOfLocalVar(param);
John McCall23f66262010-05-26 22:34:26 +00003147
John McCall32ea9692011-03-11 20:59:21 +00003148 QualType type = param->getType();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00003149
Reid Kleckner25b56022018-12-12 23:46:06 +00003150 if (isInAllocaArgument(CGM.getCXXABI(), type)) {
3151 CGM.ErrorUnsupported(param, "forwarded non-trivially copyable parameter");
3152 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003153
John McCall811b2912016-11-18 01:08:24 +00003154 // GetAddrOfLocalVar returns a pointer-to-pointer for references,
3155 // but the argument needs to be the original pointer.
3156 if (type->isReferenceType()) {
3157 args.add(RValue::get(Builder.CreateLoad(local)), type);
3158
3159 // In ARC, move out of consumed arguments so that the release cleanup
3160 // entered by StartFunction doesn't cause an over-release. This isn't
3161 // optimal -O0 code generation, but it should get cleaned up when
3162 // optimization is enabled. This also assumes that delegate calls are
3163 // performed exactly once for a set of arguments, but that should be safe.
3164 } else if (getLangOpts().ObjCAutoRefCount &&
3165 param->hasAttr<NSConsumedAttr>() &&
3166 type->isObjCRetainableType()) {
3167 llvm::Value *ptr = Builder.CreateLoad(local);
3168 auto null =
3169 llvm::ConstantPointerNull::get(cast<llvm::PointerType>(ptr->getType()));
3170 Builder.CreateStore(null, local);
3171 args.add(RValue::get(ptr), type);
3172
Richard Smithd62d4982016-06-14 01:13:21 +00003173 // For the most part, we just need to load the alloca, except that
3174 // aggregate r-values are actually pointers to temporaries.
John McCall811b2912016-11-18 01:08:24 +00003175 } else {
Richard Smithd62d4982016-06-14 01:13:21 +00003176 args.add(convertTempToRValue(local, type, loc), type);
John McCall811b2912016-11-18 01:08:24 +00003177 }
Akira Hatanakaccda3d22018-04-27 06:57:00 +00003178
3179 // Deactivate the cleanup for the callee-destructed param that was pushed.
3180 if (hasAggregateEvaluationKind(type) && !CurFuncIsThunk &&
Simon Pilgrim1cd399c2019-10-03 11:22:48 +00003181 type->castAs<RecordType>()->getDecl()->isParamDestroyedInCallee() &&
Richard Smith2b4fa532019-09-29 05:08:46 +00003182 param->needsDestruction(getContext())) {
Akira Hatanakaccda3d22018-04-27 06:57:00 +00003183 EHScopeStack::stable_iterator cleanup =
3184 CalleeDestructedParamCleanups.lookup(cast<ParmVarDecl>(param));
3185 assert(cleanup.isValid() &&
3186 "cleanup for callee-destructed param not recorded");
3187 // This unreachable is a temporary marker which will be removed later.
3188 llvm::Instruction *isActive = Builder.CreateUnreachable();
3189 args.addArgCleanupDeactivation(cleanup, isActive);
3190 }
John McCall23f66262010-05-26 22:34:26 +00003191}
3192
John McCall31168b02011-06-15 23:02:42 +00003193static bool isProvablyNull(llvm::Value *addr) {
3194 return isa<llvm::ConstantPointerNull>(addr);
3195}
3196
John McCall31168b02011-06-15 23:02:42 +00003197/// Emit the actual writing-back of a writeback.
3198static void emitWriteback(CodeGenFunction &CGF,
3199 const CallArgList::Writeback &writeback) {
John McCalleff18842013-03-23 02:35:54 +00003200 const LValue &srcLV = writeback.Source;
Akira Hatanakaf139ae32019-12-03 15:17:01 -08003201 Address srcAddr = srcLV.getAddress(CGF);
John McCall7f416cc2015-09-08 08:05:57 +00003202 assert(!isProvablyNull(srcAddr.getPointer()) &&
John McCall31168b02011-06-15 23:02:42 +00003203 "shouldn't have writeback for provably null argument");
3204
Craig Topper8a13c412014-05-21 05:09:00 +00003205 llvm::BasicBlock *contBB = nullptr;
John McCall31168b02011-06-15 23:02:42 +00003206
3207 // If the argument wasn't provably non-null, we need to null check
3208 // before doing the store.
Nuno Lopes9211cee2017-09-09 18:25:36 +00003209 bool provablyNonNull = llvm::isKnownNonZero(srcAddr.getPointer(),
3210 CGF.CGM.getDataLayout());
John McCall31168b02011-06-15 23:02:42 +00003211 if (!provablyNonNull) {
3212 llvm::BasicBlock *writebackBB = CGF.createBasicBlock("icr.writeback");
3213 contBB = CGF.createBasicBlock("icr.done");
3214
John McCall7f416cc2015-09-08 08:05:57 +00003215 llvm::Value *isNull =
3216 CGF.Builder.CreateIsNull(srcAddr.getPointer(), "icr.isnull");
John McCall31168b02011-06-15 23:02:42 +00003217 CGF.Builder.CreateCondBr(isNull, contBB, writebackBB);
3218 CGF.EmitBlock(writebackBB);
3219 }
3220
3221 // Load the value to writeback.
3222 llvm::Value *value = CGF.Builder.CreateLoad(writeback.Temporary);
3223
3224 // Cast it back, in case we're writing an id to a Foo* or something.
John McCall7f416cc2015-09-08 08:05:57 +00003225 value = CGF.Builder.CreateBitCast(value, srcAddr.getElementType(),
3226 "icr.writeback-cast");
Fangrui Song6907ce22018-07-30 19:24:48 +00003227
John McCall31168b02011-06-15 23:02:42 +00003228 // Perform the writeback.
John McCalleff18842013-03-23 02:35:54 +00003229
3230 // If we have a "to use" value, it's something we need to emit a use
3231 // of. This has to be carefully threaded in: if it's done after the
3232 // release it's potentially undefined behavior (and the optimizer
3233 // will ignore it), and if it happens before the retain then the
3234 // optimizer could move the release there.
3235 if (writeback.ToUse) {
3236 assert(srcLV.getObjCLifetime() == Qualifiers::OCL_Strong);
3237
3238 // Retain the new value. No need to block-copy here: the block's
3239 // being passed up the stack.
3240 value = CGF.EmitARCRetainNonBlock(value);
3241
3242 // Emit the intrinsic use here.
3243 CGF.EmitARCIntrinsicUse(writeback.ToUse);
3244
3245 // Load the old value (primitively).
Nick Lewycky2d84e842013-10-02 02:29:49 +00003246 llvm::Value *oldValue = CGF.EmitLoadOfScalar(srcLV, SourceLocation());
John McCalleff18842013-03-23 02:35:54 +00003247
3248 // Put the new value in place (primitively).
3249 CGF.EmitStoreOfScalar(value, srcLV, /*init*/ false);
3250
3251 // Release the old value.
3252 CGF.EmitARCRelease(oldValue, srcLV.isARCPreciseLifetime());
3253
3254 // Otherwise, we can just do a normal lvalue store.
3255 } else {
3256 CGF.EmitStoreThroughLValue(RValue::get(value), srcLV);
3257 }
John McCall31168b02011-06-15 23:02:42 +00003258
3259 // Jump to the continuation block.
3260 if (!provablyNonNull)
3261 CGF.EmitBlock(contBB);
3262}
3263
3264static void emitWritebacks(CodeGenFunction &CGF,
3265 const CallArgList &args) {
Aaron Ballman36a7fa82014-03-17 17:22:27 +00003266 for (const auto &I : args.writebacks())
3267 emitWriteback(CGF, I);
John McCall31168b02011-06-15 23:02:42 +00003268}
3269
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003270static void deactivateArgCleanupsBeforeCall(CodeGenFunction &CGF,
3271 const CallArgList &CallArgs) {
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003272 ArrayRef<CallArgList::CallArgCleanup> Cleanups =
3273 CallArgs.getCleanupsToDeactivate();
3274 // Iterate in reverse to increase the likelihood of popping the cleanup.
Pete Cooper57d3f142015-07-30 17:22:52 +00003275 for (const auto &I : llvm::reverse(Cleanups)) {
3276 CGF.DeactivateCleanupBlock(I.Cleanup, I.IsActiveIP);
3277 I.IsActiveIP->eraseFromParent();
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003278 }
3279}
3280
John McCalleff18842013-03-23 02:35:54 +00003281static const Expr *maybeGetUnaryAddrOfOperand(const Expr *E) {
3282 if (const UnaryOperator *uop = dyn_cast<UnaryOperator>(E->IgnoreParens()))
3283 if (uop->getOpcode() == UO_AddrOf)
3284 return uop->getSubExpr();
Craig Topper8a13c412014-05-21 05:09:00 +00003285 return nullptr;
John McCalleff18842013-03-23 02:35:54 +00003286}
3287
John McCall31168b02011-06-15 23:02:42 +00003288/// Emit an argument that's being passed call-by-writeback. That is,
John McCall7f416cc2015-09-08 08:05:57 +00003289/// we are passing the address of an __autoreleased temporary; it
3290/// might be copy-initialized with the current value of the given
3291/// address, but it will definitely be copied back out after the call.
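///
/// A hypothetical Objective-C caller that takes this path under ARC (the
/// names are made up for illustration):
/// \code
///   NSError *err = nil;                      // __strong local
///   [mgr removeItemAtPath:path error:&err];  // parameter is NSError *__autoreleasing *
/// \endcode
/// The frontend wraps '&err' in an ObjCIndirectCopyRestoreExpr; we pass the
/// address of a fresh temporary and write its value back to 'err' afterwards.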
John McCall31168b02011-06-15 23:02:42 +00003292static void emitWritebackArg(CodeGenFunction &CGF, CallArgList &args,
3293 const ObjCIndirectCopyRestoreExpr *CRE) {
John McCalleff18842013-03-23 02:35:54 +00003294 LValue srcLV;
3295
3296 // Make an optimistic effort to emit the address as an l-value.
Eric Christopher2c4555a2015-06-19 01:52:53 +00003297 // This can fail if the argument expression is more complicated.
John McCalleff18842013-03-23 02:35:54 +00003298 if (const Expr *lvExpr = maybeGetUnaryAddrOfOperand(CRE->getSubExpr())) {
3299 srcLV = CGF.EmitLValue(lvExpr);
3300
3301 // Otherwise, just emit it as a scalar.
3302 } else {
John McCall7f416cc2015-09-08 08:05:57 +00003303 Address srcAddr = CGF.EmitPointerWithAlignment(CRE->getSubExpr());
John McCalleff18842013-03-23 02:35:54 +00003304
3305 QualType srcAddrType =
3306 CRE->getSubExpr()->getType()->castAs<PointerType>()->getPointeeType();
John McCall7f416cc2015-09-08 08:05:57 +00003307 srcLV = CGF.MakeAddrLValue(srcAddr, srcAddrType);
John McCalleff18842013-03-23 02:35:54 +00003308 }
Akira Hatanakaf139ae32019-12-03 15:17:01 -08003309 Address srcAddr = srcLV.getAddress(CGF);
John McCall31168b02011-06-15 23:02:42 +00003310
3311 // The dest and src types don't necessarily match in LLVM terms
3312 // because of the crazy ObjC compatibility rules.
3313
Chris Lattner2192fe52011-07-18 04:24:23 +00003314 llvm::PointerType *destType =
John McCall31168b02011-06-15 23:02:42 +00003315 cast<llvm::PointerType>(CGF.ConvertType(CRE->getType()));
3316
3317 // If the address is a constant null, just pass the appropriate null.
John McCall7f416cc2015-09-08 08:05:57 +00003318 if (isProvablyNull(srcAddr.getPointer())) {
John McCall31168b02011-06-15 23:02:42 +00003319 args.add(RValue::get(llvm::ConstantPointerNull::get(destType)),
3320 CRE->getType());
3321 return;
3322 }
3323
John McCall31168b02011-06-15 23:02:42 +00003324 // Create the temporary.
John McCall7f416cc2015-09-08 08:05:57 +00003325 Address temp = CGF.CreateTempAlloca(destType->getElementType(),
3326 CGF.getPointerAlign(),
3327 "icr.temp");
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00003328 // Loading an l-value can introduce a cleanup if the l-value is __weak,
3329 // and that cleanup will be conditional if we can't prove that the l-value
3330 // isn't null, so we need to register a dominating point so that the cleanups
3331 // system will make valid IR.
3332 CodeGenFunction::ConditionalEvaluation condEval(CGF);
Fangrui Song6907ce22018-07-30 19:24:48 +00003333
John McCall31168b02011-06-15 23:02:42 +00003334 // Zero-initialize it if we're not doing a copy-initialization.
3335 bool shouldCopy = CRE->shouldCopy();
3336 if (!shouldCopy) {
3337 llvm::Value *null =
3338 llvm::ConstantPointerNull::get(
3339 cast<llvm::PointerType>(destType->getElementType()));
3340 CGF.Builder.CreateStore(null, temp);
3341 }
Craig Topper8a13c412014-05-21 05:09:00 +00003342
3343 llvm::BasicBlock *contBB = nullptr;
3344 llvm::BasicBlock *originBB = nullptr;
John McCall31168b02011-06-15 23:02:42 +00003345
3346 // If the address is *not* known to be non-null, we need to switch.
3347 llvm::Value *finalArgument;
3348
Nuno Lopes9211cee2017-09-09 18:25:36 +00003349 bool provablyNonNull = llvm::isKnownNonZero(srcAddr.getPointer(),
3350 CGF.CGM.getDataLayout());
John McCall31168b02011-06-15 23:02:42 +00003351 if (provablyNonNull) {
John McCall7f416cc2015-09-08 08:05:57 +00003352 finalArgument = temp.getPointer();
John McCall31168b02011-06-15 23:02:42 +00003353 } else {
John McCall7f416cc2015-09-08 08:05:57 +00003354 llvm::Value *isNull =
3355 CGF.Builder.CreateIsNull(srcAddr.getPointer(), "icr.isnull");
John McCall31168b02011-06-15 23:02:42 +00003356
Fangrui Song6907ce22018-07-30 19:24:48 +00003357 finalArgument = CGF.Builder.CreateSelect(isNull,
John McCall31168b02011-06-15 23:02:42 +00003358 llvm::ConstantPointerNull::get(destType),
John McCall7f416cc2015-09-08 08:05:57 +00003359 temp.getPointer(), "icr.argument");
John McCall31168b02011-06-15 23:02:42 +00003360
3361 // If we need to copy, then the load has to be conditional, which
3362 // means we need control flow.
3363 if (shouldCopy) {
John McCalleff18842013-03-23 02:35:54 +00003364 originBB = CGF.Builder.GetInsertBlock();
John McCall31168b02011-06-15 23:02:42 +00003365 contBB = CGF.createBasicBlock("icr.cont");
3366 llvm::BasicBlock *copyBB = CGF.createBasicBlock("icr.copy");
3367 CGF.Builder.CreateCondBr(isNull, contBB, copyBB);
3368 CGF.EmitBlock(copyBB);
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00003369 condEval.begin(CGF);
John McCall31168b02011-06-15 23:02:42 +00003370 }
3371 }
3372
Craig Topper8a13c412014-05-21 05:09:00 +00003373 llvm::Value *valueToUse = nullptr;
John McCalleff18842013-03-23 02:35:54 +00003374
John McCall31168b02011-06-15 23:02:42 +00003375 // Perform a copy if necessary.
3376 if (shouldCopy) {
Nick Lewycky2d84e842013-10-02 02:29:49 +00003377 RValue srcRV = CGF.EmitLoadOfLValue(srcLV, SourceLocation());
John McCall31168b02011-06-15 23:02:42 +00003378 assert(srcRV.isScalar());
3379
3380 llvm::Value *src = srcRV.getScalarVal();
3381 src = CGF.Builder.CreateBitCast(src, destType->getElementType(),
3382 "icr.cast");
3383
3384 // Use an ordinary store, not a store-to-lvalue.
3385 CGF.Builder.CreateStore(src, temp);
John McCalleff18842013-03-23 02:35:54 +00003386
3387 // If optimization is enabled, and the value was held in a
3388 // __strong variable, we need to tell the optimizer that this
3389 // value has to stay alive until we're doing the store back.
3390 // This is because the temporary is effectively unretained,
3391 // and so otherwise we can violate the high-level semantics.
3392 if (CGF.CGM.getCodeGenOpts().OptimizationLevel != 0 &&
3393 srcLV.getObjCLifetime() == Qualifiers::OCL_Strong) {
3394 valueToUse = src;
3395 }
John McCall31168b02011-06-15 23:02:42 +00003396 }
Fangrui Song6907ce22018-07-30 19:24:48 +00003397
John McCall31168b02011-06-15 23:02:42 +00003398 // Finish the control flow if we needed it.
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00003399 if (shouldCopy && !provablyNonNull) {
John McCalleff18842013-03-23 02:35:54 +00003400 llvm::BasicBlock *copyBB = CGF.Builder.GetInsertBlock();
John McCall31168b02011-06-15 23:02:42 +00003401 CGF.EmitBlock(contBB);
John McCalleff18842013-03-23 02:35:54 +00003402
3403 // Make a phi for the value to intrinsically use.
3404 if (valueToUse) {
3405 llvm::PHINode *phiToUse = CGF.Builder.CreatePHI(valueToUse->getType(), 2,
3406 "icr.to-use");
3407 phiToUse->addIncoming(valueToUse, copyBB);
3408 phiToUse->addIncoming(llvm::UndefValue::get(valueToUse->getType()),
3409 originBB);
3410 valueToUse = phiToUse;
3411 }
3412
Fariborz Jahanianfbd19742012-11-27 23:02:53 +00003413 condEval.end(CGF);
3414 }
John McCall31168b02011-06-15 23:02:42 +00003415
John McCalleff18842013-03-23 02:35:54 +00003416 args.addWriteback(srcLV, temp, valueToUse);
John McCall31168b02011-06-15 23:02:42 +00003417 args.add(RValue::get(finalArgument), CRE->getType());
3418}
3419
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003420void CallArgList::allocateArgumentMemory(CodeGenFunction &CGF) {
Richard Smith762672a2016-09-28 19:09:10 +00003421 assert(!StackBase);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003422
3423 // Save the stack.
3424 llvm::Function *F = CGF.CGM.getIntrinsic(llvm::Intrinsic::stacksave);
David Blaikie43f9bb72015-05-18 22:14:03 +00003425 StackBase = CGF.Builder.CreateCall(F, {}, "inalloca.save");
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003426}
3427
Nico Weber8cdb3f92015-08-25 18:43:32 +00003428void CallArgList::freeArgumentMemory(CodeGenFunction &CGF) const {
3429 if (StackBase) {
Reid Kleckner7c2f9e82015-10-08 00:17:45 +00003430 // Restore the stack after the call.
James Y Knight8799cae2019-02-03 21:53:49 +00003431 llvm::Function *F = CGF.CGM.getIntrinsic(llvm::Intrinsic::stackrestore);
Nico Weber8cdb3f92015-08-25 18:43:32 +00003432 CGF.Builder.CreateCall(F, StackBase);
3433 }
3434}
3435
Nuno Lopes1ba2d782015-05-30 16:11:40 +00003436void CodeGenFunction::EmitNonNullArgCheck(RValue RV, QualType ArgType,
3437 SourceLocation ArgLoc,
Vedant Kumared00ea02017-03-06 05:28:22 +00003438 AbstractCallee AC,
Nuno Lopes1ba2d782015-05-30 16:11:40 +00003439 unsigned ParmNum) {
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003440 if (!AC.getDecl() || !(SanOpts.has(SanitizerKind::NonnullAttribute) ||
3441 SanOpts.has(SanitizerKind::NullabilityArg)))
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003442 return;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003443
3444 // The param decl may be missing in a variadic function.
Vedant Kumared00ea02017-03-06 05:28:22 +00003445 auto PVD = ParmNum < AC.getNumParams() ? AC.getParamDecl(ParmNum) : nullptr;
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003446 unsigned ArgNo = PVD ? PVD->getFunctionScopeIndex() : ParmNum;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003447
Fangrui Song6907ce22018-07-30 19:24:48 +00003448 // Prefer the nonnull attribute if it's present.
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003449 const NonNullAttr *NNAttr = nullptr;
3450 if (SanOpts.has(SanitizerKind::NonnullAttribute))
3451 NNAttr = getNonNullAttr(AC.getDecl(), PVD, ArgType, ArgNo);
3452
3453 bool CanCheckNullability = false;
3454 if (SanOpts.has(SanitizerKind::NullabilityArg) && !NNAttr && PVD) {
3455 auto Nullability = PVD->getType()->getNullability(getContext());
3456 CanCheckNullability = Nullability &&
3457 *Nullability == NullabilityKind::NonNull &&
3458 PVD->getTypeSourceInfo();
3459 }
3460
3461 if (!NNAttr && !CanCheckNullability)
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003462 return;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003463
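  // Illustrative (hypothetical) parameter declarations that trigger this
  // check, assuming -fsanitize=nonnull-attribute / -fsanitize=nullability-arg:
  //
  //   void f(int *p) __attribute__((nonnull(1)));   // NonNullAttr
  //   void g(int *_Nonnull p);                      // nullability annotation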
3464 SourceLocation AttrLoc;
3465 SanitizerMask CheckKind;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003466 SanitizerHandler Handler;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003467 if (NNAttr) {
3468 AttrLoc = NNAttr->getLocation();
3469 CheckKind = SanitizerKind::NonnullAttribute;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003470 Handler = SanitizerHandler::NonnullArg;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003471 } else {
3472 AttrLoc = PVD->getTypeSourceInfo()->getTypeLoc().findNullabilityLoc();
3473 CheckKind = SanitizerKind::NullabilityArg;
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003474 Handler = SanitizerHandler::NullabilityArg;
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003475 }
3476
Nuno Lopes1ba2d782015-05-30 16:11:40 +00003477 SanitizerScope SanScope(this);
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003478 assert(RV.isScalar());
3479 llvm::Value *V = RV.getScalarVal();
3480 llvm::Value *Cond =
Nuno Lopes1ba2d782015-05-30 16:11:40 +00003481 Builder.CreateICmpNE(V, llvm::Constant::getNullValue(V->getType()));
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003482 llvm::Constant *StaticData[] = {
Vedant Kumar42c17ec2017-03-14 01:56:34 +00003483 EmitCheckSourceLocation(ArgLoc), EmitCheckSourceLocation(AttrLoc),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00003484 llvm::ConstantInt::get(Int32Ty, ArgNo + 1),
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003485 };
Vedant Kumar2b9f48a2017-03-14 16:48:29 +00003486 EmitCheck(std::make_pair(Cond, CheckKind), Handler, StaticData, None);
Alexey Samsonov8e1162c2014-09-08 17:22:45 +00003487}
3488
David Blaikief05779e2015-07-21 18:37:18 +00003489void CodeGenFunction::EmitCallArgs(
3490 CallArgList &Args, ArrayRef<QualType> ArgTypes,
3491 llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
Vedant Kumared00ea02017-03-06 05:28:22 +00003492 AbstractCallee AC, unsigned ParamsToSkip, EvaluationOrder Order) {
David Blaikief05779e2015-07-21 18:37:18 +00003493 assert((int)ArgTypes.size() == (ArgRange.end() - ArgRange.begin()));
George Burgess IV3e3bb95b2015-12-02 21:58:08 +00003494
Reid Kleckner739756c2013-12-04 19:23:12 +00003495 // We *have* to evaluate arguments from right to left in the MS C++ ABI,
Richard Smitha560ccf2016-09-29 21:30:12 +00003496 // because arguments are destroyed left to right in the callee. As a special
3497 // case, there are certain language constructs that require left-to-right
3498 // evaluation, and in those cases we consider the evaluation order requirement
3499 // to trump the "destruction order is reverse construction order" guarantee.
3500 bool LeftToRight =
3501 CGM.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()
3502 ? Order == EvaluationOrder::ForceLeftToRight
3503 : Order != EvaluationOrder::ForceRightToLeft;
3504
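  // For illustration (hypothetical call): for 'g(f1(), f2())' under the
  // Microsoft C++ ABI we normally emit f2() before f1(), so that the callee,
  // which destroys its arguments left to right, destroys them in reverse
  // construction order. Language constructs that mandate a particular order
  // pass ForceLeftToRight / ForceRightToLeft instead.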
George Burgess IV0d6592a2017-02-23 05:59:56 +00003505 auto MaybeEmitImplicitObjectSize = [&](unsigned I, const Expr *Arg,
3506 RValue EmittedArg) {
Vedant Kumared00ea02017-03-06 05:28:22 +00003507 if (!AC.hasFunctionDecl() || I >= AC.getNumParams())
George Burgess IV0d6592a2017-02-23 05:59:56 +00003508 return;
Vedant Kumared00ea02017-03-06 05:28:22 +00003509 auto *PS = AC.getParamDecl(I)->getAttr<PassObjectSizeAttr>();
George Burgess IV0d6592a2017-02-23 05:59:56 +00003510 if (PS == nullptr)
3511 return;
3512
3513 const auto &Context = getContext();
3514 auto SizeTy = Context.getSizeType();
3515 auto T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
3516 assert(EmittedArg.getScalarVal() && "We emitted nothing for the arg?");
3517 llvm::Value *V = evaluateOrEmitBuiltinObjectSize(Arg, PS->getType(), T,
Erik Pilkington9c3b5882019-01-30 20:34:53 +00003518 EmittedArg.getScalarVal(),
Erik Pilkington02d5fb12019-03-19 20:44:18 +00003519 PS->isDynamic());
George Burgess IV0d6592a2017-02-23 05:59:56 +00003520 Args.add(RValue::get(V), SizeTy);
3521 // If we're emitting args in reverse, be sure to do so with
3522 // pass_object_size, as well.
3523 if (!LeftToRight)
3524 std::swap(Args.back(), *(&Args.back() - 1));
3525 };
3526
Richard Smitha560ccf2016-09-29 21:30:12 +00003527 // Insert a stack save if we're going to need any inalloca args.
3528 bool HasInAllocaArgs = false;
3529 if (CGM.getTarget().getCXXABI().isMicrosoft()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003530 for (ArrayRef<QualType>::iterator I = ArgTypes.begin(), E = ArgTypes.end();
3531 I != E && !HasInAllocaArgs; ++I)
3532 HasInAllocaArgs = isInAllocaArgument(CGM.getCXXABI(), *I);
3533 if (HasInAllocaArgs) {
3534 assert(getTarget().getTriple().getArch() == llvm::Triple::x86);
3535 Args.allocateArgumentMemory(*this);
3536 }
Richard Smitha560ccf2016-09-29 21:30:12 +00003537 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003538
Richard Smitha560ccf2016-09-29 21:30:12 +00003539 // Evaluate each argument in the appropriate order.
3540 size_t CallArgsStart = Args.size();
3541 for (unsigned I = 0, E = ArgTypes.size(); I != E; ++I) {
3542 unsigned Idx = LeftToRight ? I : E - I - 1;
3543 CallExpr::const_arg_iterator Arg = ArgRange.begin() + Idx;
George Burgess IV0d6592a2017-02-23 05:59:56 +00003544 unsigned InitialArgSize = Args.size();
Akira Hatanaka46dd7dbc2017-06-28 00:42:48 +00003545 // If *Arg is an ObjCIndirectCopyRestoreExpr, check that either the types of
3546 // the argument and parameter match or the objc method is parameterized.
3547 assert((!isa<ObjCIndirectCopyRestoreExpr>(*Arg) ||
3548 getContext().hasSameUnqualifiedType((*Arg)->getType(),
3549 ArgTypes[Idx]) ||
3550 (isa<ObjCMethodDecl>(AC.getDecl()) &&
3551 isObjCMethodWithTypeParams(cast<ObjCMethodDecl>(AC.getDecl())))) &&
3552 "Argument and parameter types don't match");
Richard Smitha560ccf2016-09-29 21:30:12 +00003553 EmitCallArg(Args, *Arg, ArgTypes[Idx]);
George Burgess IV0d6592a2017-02-23 05:59:56 +00003554 // In particular, we depend on it being the last arg in Args, and the
3555 // objectsize bits depend on there only being one arg if !LeftToRight.
3556 assert(InitialArgSize + 1 == Args.size() &&
3557 "The code below depends on only adding one arg per EmitCallArg");
3558 (void)InitialArgSize;
Yaxun Liu5b330e82018-03-15 15:25:19 +00003559 // Since pointer argument are never emitted as LValue, it is safe to emit
3560 // non-null argument check for r-value only.
3561 if (!Args.back().hasLValue()) {
3562 RValue RVArg = Args.back().getKnownRValue();
3563 EmitNonNullArgCheck(RVArg, ArgTypes[Idx], (*Arg)->getExprLoc(), AC,
3564 ParamsToSkip + Idx);
3565 // @llvm.objectsize should never have side-effects and shouldn't need
3566 // destruction/cleanups, so we can safely "emit" it after its arg,
3567      // regardless of right-to-leftness.
3568 MaybeEmitImplicitObjectSize(Idx, *Arg, RVArg);
3569 }
Richard Smitha560ccf2016-09-29 21:30:12 +00003570 }
Reid Kleckner739756c2013-12-04 19:23:12 +00003571
Richard Smitha560ccf2016-09-29 21:30:12 +00003572 if (!LeftToRight) {
Reid Kleckner739756c2013-12-04 19:23:12 +00003573 // Un-reverse the arguments we just evaluated so they match up with the LLVM
3574 // IR function.
3575 std::reverse(Args.begin() + CallArgsStart, Args.end());
Reid Kleckner739756c2013-12-04 19:23:12 +00003576 }
3577}
3578
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003579namespace {
3580
David Blaikie7e70d682015-08-18 22:40:54 +00003581struct DestroyUnpassedArg final : EHScopeStack::Cleanup {
John McCall7f416cc2015-09-08 08:05:57 +00003582 DestroyUnpassedArg(Address Addr, QualType Ty)
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003583 : Addr(Addr), Ty(Ty) {}
3584
John McCall7f416cc2015-09-08 08:05:57 +00003585 Address Addr;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003586 QualType Ty;
3587
Craig Topper4f12f102014-03-12 06:41:41 +00003588 void Emit(CodeGenFunction &CGF, Flags flags) override {
Akira Hatanaka7275da02018-02-28 07:15:55 +00003589 QualType::DestructionKind DtorKind = Ty.isDestructedType();
3590 if (DtorKind == QualType::DK_cxx_destructor) {
3591 const CXXDestructorDecl *Dtor = Ty->getAsCXXRecordDecl()->getDestructor();
3592 assert(!Dtor->isTrivial());
3593 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*for vbase*/ false,
Marco Antognini88559632019-07-22 09:39:13 +00003594 /*Delegating=*/false, Addr, Ty);
Akira Hatanaka7275da02018-02-28 07:15:55 +00003595 } else {
3596 CGF.callCStructDestructor(CGF.MakeAddrLValue(Addr, Ty));
3597 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003598 }
3599};
3600
David Blaikie38b25912015-02-09 19:13:51 +00003601struct DisableDebugLocationUpdates {
3602 CodeGenFunction &CGF;
3603 bool disabledDebugInfo;
3604 DisableDebugLocationUpdates(CodeGenFunction &CGF, const Expr *E) : CGF(CGF) {
3605 if ((disabledDebugInfo = isa<CXXDefaultArgExpr>(E) && CGF.getDebugInfo()))
3606 CGF.disableDebugInfo();
3607 }
3608 ~DisableDebugLocationUpdates() {
3609 if (disabledDebugInfo)
3610 CGF.enableDebugInfo();
3611 }
3612};
3613
Benjamin Kramer5b4296a2015-10-28 17:16:26 +00003614} // end anonymous namespace
3615
Yaxun Liu5b330e82018-03-15 15:25:19 +00003616RValue CallArg::getRValue(CodeGenFunction &CGF) const {
3617 if (!HasLV)
3618 return RV;
3619 LValue Copy = CGF.MakeAddrLValue(CGF.CreateMemTemp(Ty), Ty);
Richard Smithe78fac52018-04-05 20:52:58 +00003620 CGF.EmitAggregateCopy(Copy, LV, Ty, AggValueSlot::DoesNotOverlap,
3621 LV.isVolatile());
Yaxun Liu5b330e82018-03-15 15:25:19 +00003622 IsUsed = true;
Akira Hatanakaf139ae32019-12-03 15:17:01 -08003623 return RValue::getAggregate(Copy.getAddress(CGF));
Yaxun Liu5b330e82018-03-15 15:25:19 +00003624}
3625
3626void CallArg::copyInto(CodeGenFunction &CGF, Address Addr) const {
3627 LValue Dst = CGF.MakeAddrLValue(Addr, Ty);
3628 if (!HasLV && RV.isScalar())
Rui Ueyama49a3ad22019-07-16 04:46:31 +00003629 CGF.EmitStoreOfScalar(RV.getScalarVal(), Dst, /*isInit=*/true);
Yaxun Liu5b330e82018-03-15 15:25:19 +00003630 else if (!HasLV && RV.isComplex())
3631 CGF.EmitStoreOfComplex(RV.getComplexVal(), Dst, /*init=*/true);
3632 else {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08003633 auto Addr = HasLV ? LV.getAddress(CGF) : RV.getAggregateAddress();
Yaxun Liu5b330e82018-03-15 15:25:19 +00003634 LValue SrcLV = CGF.MakeAddrLValue(Addr, Ty);
Richard Smithe78fac52018-04-05 20:52:58 +00003635 // We assume that call args are never copied into subobjects.
3636 CGF.EmitAggregateCopy(Dst, SrcLV, Ty, AggValueSlot::DoesNotOverlap,
Yaxun Liu5b330e82018-03-15 15:25:19 +00003637 HasLV ? LV.isVolatileQualified()
3638 : RV.isVolatileQualified());
3639 }
3640 IsUsed = true;
3641}
3642
John McCall32ea9692011-03-11 20:59:21 +00003643void CodeGenFunction::EmitCallArg(CallArgList &args, const Expr *E,
3644 QualType type) {
David Blaikie38b25912015-02-09 19:13:51 +00003645 DisableDebugLocationUpdates Dis(*this, E);
John McCall31168b02011-06-15 23:02:42 +00003646 if (const ObjCIndirectCopyRestoreExpr *CRE
3647 = dyn_cast<ObjCIndirectCopyRestoreExpr>(E)) {
Richard Smith9c6890a2012-11-01 22:30:59 +00003648 assert(getLangOpts().ObjCAutoRefCount);
John McCall31168b02011-06-15 23:02:42 +00003649 return emitWritebackArg(*this, args, CRE);
3650 }
3651
John McCall0a76c0c2011-08-26 18:42:59 +00003652 assert(type->isReferenceType() == E->isGLValue() &&
3653 "reference binding to unmaterialized r-value!");
3654
John McCall17054bd62011-08-26 21:08:13 +00003655 if (E->isGLValue()) {
3656 assert(E->getObjectKind() == OK_Ordinary);
Richard Smitha1c9d4d2013-06-12 23:38:09 +00003657 return args.add(EmitReferenceBindingToExpr(E), type);
John McCall17054bd62011-08-26 21:08:13 +00003658 }
Mike Stump11289f42009-09-09 15:08:12 +00003659
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003660 bool HasAggregateEvalKind = hasAggregateEvaluationKind(type);
3661
3662 // In the Microsoft C++ ABI, aggregate arguments are destructed by the callee.
3663 // However, we still have to push an EH-only cleanup in case we unwind before
3664 // we make it to the call.
Akira Hatanaka85282972018-05-15 21:00:30 +00003665 if (HasAggregateEvalKind &&
Simon Pilgrim1cd399c2019-10-03 11:22:48 +00003666 type->castAs<RecordType>()->getDecl()->isParamDestroyedInCallee()) {
Reid Klecknerac640602014-05-01 03:07:18 +00003667 // If we're using inalloca, use the argument memory. Otherwise, use a
Reid Klecknere39ee212014-05-03 00:33:28 +00003668 // temporary.
Reid Klecknerac640602014-05-01 03:07:18 +00003669 AggValueSlot Slot;
3670 if (args.isUsingInAlloca())
3671 Slot = createPlaceholderSlot(*this, type);
3672 else
3673 Slot = CreateAggTemp(type, "agg.tmp");
Reid Klecknere39ee212014-05-03 00:33:28 +00003674
Akira Hatanaka4ce0e5a2018-04-18 23:33:15 +00003675 bool DestroyedInCallee = true, NeedsEHCleanup = true;
3676 if (const auto *RD = type->getAsCXXRecordDecl())
3677 DestroyedInCallee = RD->hasNonTrivialDestructor();
3678 else
3679 NeedsEHCleanup = needsEHCleanup(type.isDestructedType());
3680
3681 if (DestroyedInCallee)
3682 Slot.setExternallyDestructed();
Reid Klecknere39ee212014-05-03 00:33:28 +00003683
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003684 EmitAggExpr(E, Slot);
3685 RValue RV = Slot.asRValue();
3686 args.add(RV, type);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003687
Akira Hatanaka4ce0e5a2018-04-18 23:33:15 +00003688 if (DestroyedInCallee && NeedsEHCleanup) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003689 // Create a no-op GEP between the placeholder and the cleanup so we can
3690 // RAUW it successfully. It also serves as a marker of the first
3691 // instruction where the cleanup is active.
John McCall7f416cc2015-09-08 08:05:57 +00003692 pushFullExprCleanup<DestroyUnpassedArg>(EHCleanup, Slot.getAddress(),
3693 type);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003694 // This unreachable is a temporary marker which will be removed later.
3695 llvm::Instruction *IsActive = Builder.CreateUnreachable();
3696 args.addArgCleanupDeactivation(EHStack.getInnermostEHScope(), IsActive);
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003697 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003698 return;
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00003699 }
3700
3701 if (HasAggregateEvalKind && isa<ImplicitCastExpr>(E) &&
Eli Friedmandf968192011-05-26 00:10:27 +00003702 cast<CastExpr>(E)->getCastKind() == CK_LValueToRValue) {
3703 LValue L = EmitLValue(cast<CastExpr>(E)->getSubExpr());
3704 assert(L.isSimple());
Yaxun Liu5b330e82018-03-15 15:25:19 +00003705 args.addUncopiedAggregate(L, type);
Eli Friedmandf968192011-05-26 00:10:27 +00003706 return;
3707 }
3708
Erik Pilkingtone26c24b2020-02-12 12:02:58 -08003709 args.add(EmitAnyExprToTemp(E), type);
Anders Carlsson60ce3fe2009-04-08 20:47:54 +00003710}
3711
Reid Kleckner79b0fd72014-10-10 00:05:45 +00003712QualType CodeGenFunction::getVarArgType(const Expr *Arg) {
3713 // System headers on Windows define NULL to 0 instead of 0LL on Win64. MSVC
3714 // implicitly widens null pointer constants that are arguments to varargs
3715 // functions to pointer-sized ints.
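  // Illustrative (hypothetical) case: 'printf("%p", NULL)' on Win64, where
  // NULL expands to a plain int 0. Promoting it to intptr_t below keeps the
  // varargs slot pointer-sized, matching MSVC, which performs the same
  // widening.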
3716 if (!getTarget().getTriple().isOSWindows())
3717 return Arg->getType();
3718
3719 if (Arg->getType()->isIntegerType() &&
3720 getContext().getTypeSize(Arg->getType()) <
3721 getContext().getTargetInfo().getPointerWidth(0) &&
3722 Arg->isNullPointerConstant(getContext(),
3723 Expr::NPC_ValueDependentIsNotNull)) {
3724 return getContext().getIntPtrType();
3725 }
3726
3727 return Arg->getType();
3728}
3729
Dan Gohman515a60d2012-02-16 00:57:37 +00003730// In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
3731// optimizer it can aggressively ignore unwind edges.
3732void
3733CodeGenFunction::AddObjCARCExceptionMetadata(llvm::Instruction *Inst) {
3734 if (CGM.getCodeGenOpts().OptimizationLevel != 0 &&
3735 !CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
3736 Inst->setMetadata("clang.arc.no_objc_arc_exceptions",
3737 CGM.getNoObjCARCExceptionsMetadata());
3738}
3739
John McCall882987f2013-02-28 19:01:20 +00003740/// Emits a call to the given no-arguments nounwind runtime function.
3741llvm::CallInst *
James Y Knight9871db02019-02-05 16:42:33 +00003742CodeGenFunction::EmitNounwindRuntimeCall(llvm::FunctionCallee callee,
John McCall882987f2013-02-28 19:01:20 +00003743 const llvm::Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003744 return EmitNounwindRuntimeCall(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003745}
3746
3747/// Emits a call to the given nounwind runtime function.
3748llvm::CallInst *
James Y Knight9871db02019-02-05 16:42:33 +00003749CodeGenFunction::EmitNounwindRuntimeCall(llvm::FunctionCallee callee,
3750 ArrayRef<llvm::Value *> args,
John McCall882987f2013-02-28 19:01:20 +00003751 const llvm::Twine &name) {
3752 llvm::CallInst *call = EmitRuntimeCall(callee, args, name);
3753 call->setDoesNotThrow();
3754 return call;
3755}
3756
3757/// Emits a simple call (never an invoke) to the given no-arguments
3758/// runtime function.
James Y Knight9871db02019-02-05 16:42:33 +00003759llvm::CallInst *CodeGenFunction::EmitRuntimeCall(llvm::FunctionCallee callee,
3760 const llvm::Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003761 return EmitRuntimeCall(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003762}
3763
David Majnemer0b17d442015-12-15 21:27:59 +00003764// Calls which may throw must have operand bundles indicating which funclet
3765// they are nested within.
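//
// Rough shape of the resulting IR (a sketch, not emitted verbatim here):
//
//   %cp = catchpad within %cs [...]
//   call void @f() [ "funclet"(token %cp) ]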
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003766SmallVector<llvm::OperandBundleDef, 1>
3767CodeGenFunction::getBundlesForFunclet(llvm::Value *Callee) {
3768 SmallVector<llvm::OperandBundleDef, 1> BundleList;
Sanjay Patel846b63b2016-01-18 22:15:33 +00003769 // There is no need for a funclet operand bundle if we aren't inside a
3770 // funclet.
David Majnemer0b17d442015-12-15 21:27:59 +00003771 if (!CurrentFuncletPad)
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003772 return BundleList;
David Majnemer0b17d442015-12-15 21:27:59 +00003773
3774 // Skip intrinsics which cannot throw.
3775 auto *CalleeFn = dyn_cast<llvm::Function>(Callee->stripPointerCasts());
3776 if (CalleeFn && CalleeFn->isIntrinsic() && CalleeFn->doesNotThrow())
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003777 return BundleList;
David Majnemer0b17d442015-12-15 21:27:59 +00003778
3779 BundleList.emplace_back("funclet", CurrentFuncletPad);
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003780 return BundleList;
David Majnemer0b17d442015-12-15 21:27:59 +00003781}
3782
David Majnemer971d31b2016-02-24 17:02:45 +00003783/// Emits a simple call (never an invoke) to the given runtime function.
James Y Knight9871db02019-02-05 16:42:33 +00003784llvm::CallInst *CodeGenFunction::EmitRuntimeCall(llvm::FunctionCallee callee,
3785 ArrayRef<llvm::Value *> args,
3786 const llvm::Twine &name) {
3787 llvm::CallInst *call = Builder.CreateCall(
3788 callee, args, getBundlesForFunclet(callee.getCallee()), name);
David Majnemer971d31b2016-02-24 17:02:45 +00003789 call->setCallingConv(getRuntimeCC());
3790 return call;
3791}
3792
John McCall882987f2013-02-28 19:01:20 +00003793/// Emits a call or invoke to the given noreturn runtime function.
James Y Knight9871db02019-02-05 16:42:33 +00003794void CodeGenFunction::EmitNoreturnRuntimeCallOrInvoke(
3795 llvm::FunctionCallee callee, ArrayRef<llvm::Value *> args) {
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003796 SmallVector<llvm::OperandBundleDef, 1> BundleList =
James Y Knight9871db02019-02-05 16:42:33 +00003797 getBundlesForFunclet(callee.getCallee());
David Majnemer0b17d442015-12-15 21:27:59 +00003798
John McCall882987f2013-02-28 19:01:20 +00003799 if (getInvokeDest()) {
Fangrui Song6907ce22018-07-30 19:24:48 +00003800 llvm::InvokeInst *invoke =
John McCall882987f2013-02-28 19:01:20 +00003801 Builder.CreateInvoke(callee,
3802 getUnreachableBlock(),
3803 getInvokeDest(),
David Majnemer0b17d442015-12-15 21:27:59 +00003804 args,
3805 BundleList);
John McCall882987f2013-02-28 19:01:20 +00003806 invoke->setDoesNotReturn();
3807 invoke->setCallingConv(getRuntimeCC());
3808 } else {
David Majnemer0b17d442015-12-15 21:27:59 +00003809 llvm::CallInst *call = Builder.CreateCall(callee, args, BundleList);
John McCall882987f2013-02-28 19:01:20 +00003810 call->setDoesNotReturn();
3811 call->setCallingConv(getRuntimeCC());
3812 Builder.CreateUnreachable();
3813 }
3814}
3815
Sanjay Patel846b63b2016-01-18 22:15:33 +00003816/// Emits a call or invoke instruction to the given nullary runtime function.
James Y Knight9871db02019-02-05 16:42:33 +00003817llvm::CallBase *
3818CodeGenFunction::EmitRuntimeCallOrInvoke(llvm::FunctionCallee callee,
3819 const Twine &name) {
Craig Topper5fc8fc22014-08-27 06:28:36 +00003820 return EmitRuntimeCallOrInvoke(callee, None, name);
John McCall882987f2013-02-28 19:01:20 +00003821}
3822
3823/// Emits a call or invoke instruction to the given runtime function.
James Y Knight9871db02019-02-05 16:42:33 +00003824llvm::CallBase *
3825CodeGenFunction::EmitRuntimeCallOrInvoke(llvm::FunctionCallee callee,
3826 ArrayRef<llvm::Value *> args,
3827 const Twine &name) {
James Y Knight3933add2019-01-30 02:54:28 +00003828 llvm::CallBase *call = EmitCallOrInvoke(callee, args, name);
3829 call->setCallingConv(getRuntimeCC());
3830 return call;
John McCall882987f2013-02-28 19:01:20 +00003831}
3832
John McCallbd309292010-07-06 01:34:17 +00003833/// Emits a call or invoke instruction to the given function, depending
3834/// on the current state of the EH stack.
James Y Knight9871db02019-02-05 16:42:33 +00003835llvm::CallBase *CodeGenFunction::EmitCallOrInvoke(llvm::FunctionCallee Callee,
James Y Knight3933add2019-01-30 02:54:28 +00003836 ArrayRef<llvm::Value *> Args,
3837 const Twine &Name) {
John McCallbd309292010-07-06 01:34:17 +00003838 llvm::BasicBlock *InvokeDest = getInvokeDest();
Reid Klecknerb75a3f02018-02-09 00:16:41 +00003839 SmallVector<llvm::OperandBundleDef, 1> BundleList =
James Y Knight9871db02019-02-05 16:42:33 +00003840 getBundlesForFunclet(Callee.getCallee());
John McCallbd309292010-07-06 01:34:17 +00003841
James Y Knight3933add2019-01-30 02:54:28 +00003842 llvm::CallBase *Inst;
Dan Gohman515a60d2012-02-16 00:57:37 +00003843 if (!InvokeDest)
David Majnemer3df77bc2016-01-26 23:14:47 +00003844 Inst = Builder.CreateCall(Callee, Args, BundleList, Name);
Dan Gohman515a60d2012-02-16 00:57:37 +00003845 else {
3846 llvm::BasicBlock *ContBB = createBasicBlock("invoke.cont");
David Majnemer3df77bc2016-01-26 23:14:47 +00003847 Inst = Builder.CreateInvoke(Callee, ContBB, InvokeDest, Args, BundleList,
3848 Name);
Dan Gohman515a60d2012-02-16 00:57:37 +00003849 EmitBlock(ContBB);
3850 }
3851
3852 // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
3853 // optimizer it can aggressively ignore unwind edges.
David Blaikiebbafb8a2012-03-11 07:00:24 +00003854 if (CGM.getLangOpts().ObjCAutoRefCount)
Dan Gohman515a60d2012-02-16 00:57:37 +00003855 AddObjCARCExceptionMetadata(Inst);
3856
James Y Knight3933add2019-01-30 02:54:28 +00003857 return Inst;
John McCallbd309292010-07-06 01:34:17 +00003858}
3859
Reid Kleckner314ef7b2014-02-01 00:04:45 +00003860void CodeGenFunction::deferPlaceholderReplacement(llvm::Instruction *Old,
3861 llvm::Value *New) {
3862 DeferredReplacements.push_back(std::make_pair(Old, New));
3863}
Chris Lattnerd59d8672011-07-12 06:29:11 +00003864
Roman Lebedeve819f7c2020-01-23 22:50:15 +03003865namespace {
3866
3867/// Set the given \p NewAlign as the alignment of the return value attribute.
3868/// If such an attribute already exists, reset it to the larger of the two.
3869LLVM_NODISCARD llvm::AttributeList
3870maybeRaiseRetAlignmentAttribute(llvm::LLVMContext &Ctx,
3871 const llvm::AttributeList &Attrs,
3872 llvm::Align NewAlign) {
3873 llvm::Align CurAlign = Attrs.getRetAlignment().valueOrOne();
3874 if (CurAlign >= NewAlign)
3875 return Attrs;
3876 llvm::Attribute AlignAttr = llvm::Attribute::getWithAlignment(Ctx, NewAlign);
3877 return Attrs
3878 .removeAttribute(Ctx, llvm::AttributeList::ReturnIndex,
3879 llvm::Attribute::AttrKind::Alignment)
3880 .addAttribute(Ctx, llvm::AttributeList::ReturnIndex, AlignAttr);
3881}
3882
3883template <typename AlignedAttrTy> class AbstractAssumeAlignedAttrEmitter {
3884protected:
3885 CodeGenFunction &CGF;
3886
3887 /// We do nothing if this is, or becomes, nullptr.
3888 const AlignedAttrTy *AA = nullptr;
3889
3890 llvm::Value *Alignment = nullptr; // May or may not be a constant.
3891 llvm::ConstantInt *OffsetCI = nullptr; // Constant, hopefully zero.
3892
3893 AbstractAssumeAlignedAttrEmitter(CodeGenFunction &CGF_, const Decl *FuncDecl)
3894 : CGF(CGF_) {
3895 if (!FuncDecl)
3896 return;
3897 AA = FuncDecl->getAttr<AlignedAttrTy>();
3898 }
3899
3900public:
3901 /// If we can, materialize the alignment as an attribute on return value.
3902 LLVM_NODISCARD llvm::AttributeList
3903 TryEmitAsCallSiteAttribute(const llvm::AttributeList &Attrs) {
3904 if (!AA || OffsetCI || CGF.SanOpts.has(SanitizerKind::Alignment))
3905 return Attrs;
3906 const auto *AlignmentCI = dyn_cast<llvm::ConstantInt>(Alignment);
3907 if (!AlignmentCI)
3908 return Attrs;
Roman Lebedev9ea5d172020-02-20 16:39:26 +03003909 // We may legitimately have non-power-of-2 alignment here.
3910  // If so, this is UB land; emit the assumption via `@llvm.assume` instead.
3911 if (!AlignmentCI->getValue().isPowerOf2())
3912 return Attrs;
Roman Lebedeve819f7c2020-01-23 22:50:15 +03003913 llvm::AttributeList NewAttrs = maybeRaiseRetAlignmentAttribute(
3914 CGF.getLLVMContext(), Attrs,
3915 llvm::Align(
3916 AlignmentCI->getLimitedValue(llvm::Value::MaximumAlignment)));
3917 AA = nullptr; // We're done. Disallow doing anything else.
3918 return NewAttrs;
3919 }
3920
3921 /// Emit alignment assumption.
3922 /// This is a general fallback that we take if either there is an offset,
3923 /// or the alignment is variable or we are sanitizing for alignment.
3924 void EmitAsAnAssumption(SourceLocation Loc, QualType RetTy, RValue &Ret) {
3925 if (!AA)
3926 return;
Fangrui Song1d49eb02020-02-13 16:36:27 -08003927 CGF.emitAlignmentAssumption(Ret.getScalarVal(), RetTy, Loc,
Roman Lebedeve819f7c2020-01-23 22:50:15 +03003928 AA->getLocation(), Alignment, OffsetCI);
3929 AA = nullptr; // We're done. Disallow doing anything else.
3930 }
3931};
3932
3933/// Helper data structure to emit `AssumeAlignedAttr`.
3934class AssumeAlignedAttrEmitter final
3935 : public AbstractAssumeAlignedAttrEmitter<AssumeAlignedAttr> {
3936public:
3937 AssumeAlignedAttrEmitter(CodeGenFunction &CGF_, const Decl *FuncDecl)
3938 : AbstractAssumeAlignedAttrEmitter(CGF_, FuncDecl) {
3939 if (!AA)
3940 return;
3941 // It is guaranteed that the alignment/offset are constants.
3942 Alignment = cast<llvm::ConstantInt>(CGF.EmitScalarExpr(AA->getAlignment()));
3943 if (Expr *Offset = AA->getOffset()) {
3944 OffsetCI = cast<llvm::ConstantInt>(CGF.EmitScalarExpr(Offset));
3945 if (OffsetCI->isNullValue()) // Canonicalize zero offset to no offset.
3946 OffsetCI = nullptr;
3947 }
3948 }
3949};
3950
Roman Lebedev5ffe6402020-01-23 22:50:24 +03003951/// Helper data structure to emit `AllocAlignAttr`.
3952class AllocAlignAttrEmitter final
3953 : public AbstractAssumeAlignedAttrEmitter<AllocAlignAttr> {
3954public:
3955 AllocAlignAttrEmitter(CodeGenFunction &CGF_, const Decl *FuncDecl,
3956 const CallArgList &CallArgs)
3957 : AbstractAssumeAlignedAttrEmitter(CGF_, FuncDecl) {
3958 if (!AA)
3959 return;
3960 // Alignment may or may not be a constant, and that is okay.
3961 Alignment = CallArgs[AA->getParamIndex().getLLVMIndex()]
3962 .getRValue(CGF)
3963 .getScalarVal();
3964 }
3965};
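
// Illustrative (hypothetical) declarations these emitters handle:
//
//   __attribute__((assume_aligned(64))) void *my_alloc(size_t);        // AssumeAlignedAttr
//   __attribute__((alloc_align(2))) void *my_alloc2(size_t, size_t);   // AllocAlignAttr
//
// When the alignment is a known power of two we attach an 'align' return
// attribute at the call site; otherwise we fall back to an @llvm.assume
// alignment assumption.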
3966
Roman Lebedeve819f7c2020-01-23 22:50:15 +03003967} // namespace
3968
Daniel Dunbard931a872009-02-02 22:03:45 +00003969RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
John McCallb92ab1a2016-10-26 23:46:34 +00003970 const CGCallee &Callee,
Anders Carlsson61a401c2009-12-24 19:25:24 +00003971 ReturnValueSlot ReturnValue,
Daniel Dunbarcdbb5e32009-02-20 18:06:48 +00003972 const CallArgList &CallArgs,
James Y Knight3933add2019-01-30 02:54:28 +00003973 llvm::CallBase **callOrInvoke,
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00003974 SourceLocation Loc) {
Mike Stump18bb9282009-05-16 07:57:57 +00003975 // FIXME: We no longer need the types from CallArgs; lift up and simplify.
Daniel Dunbar613855c2008-09-09 23:27:19 +00003976
Peter Collingbourneea211002018-02-05 23:09:13 +00003977 assert(Callee.isOrdinary() || Callee.isVirtual());
John McCallb92ab1a2016-10-26 23:46:34 +00003978
Daniel Dunbar613855c2008-09-09 23:27:19 +00003979 // Handle struct-return functions by passing a pointer to the
3980 // location that we would like to return into.
Daniel Dunbar7633cbf2009-02-02 21:43:58 +00003981 QualType RetTy = CallInfo.getReturnType();
Daniel Dunbarb52d0772009-02-03 05:59:18 +00003982 const ABIArgInfo &RetAI = CallInfo.getReturnInfo();
Mike Stump11289f42009-09-09 15:08:12 +00003983
James Y Knightcfe8cd72019-02-07 01:15:41 +00003984 llvm::FunctionType *IRFuncTy = getTypes().GetFunctionType(CallInfo);
John McCallb92ab1a2016-10-26 23:46:34 +00003985
Amy Huang0d0334f2019-04-12 20:25:30 +00003986 const Decl *TargetDecl = Callee.getAbstractInfo().getCalleeDecl().getDecl();
Erich Keane36176242019-06-21 22:29:32 +00003987 if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(TargetDecl))
3988    // We can only guarantee that a function is called from the correct
3989    // context/function based on the appropriate target attributes, so we only
3990    // check in the case where we have both the always_inline and target
3991    // attributes; otherwise we could be making a conditional call after a check
3992    // for the proper cpu features (and it won't cause code generation issues due
3993    // to function-based code generation).
3994 if (TargetDecl->hasAttr<AlwaysInlineAttr>() &&
3995 TargetDecl->hasAttr<TargetAttr>())
3996 checkTargetFeatures(Loc, FD);
Amy Huang0d0334f2019-04-12 20:25:30 +00003997
James Y Knightb92d2902019-02-05 16:05:50 +00003998#ifndef NDEBUG
3999 if (!(CallInfo.isVariadic() && CallInfo.getArgStruct())) {
4000 // For an inalloca varargs function, we don't expect CallInfo to match the
4001    // function pointer's type, because the inalloca struct will have extra
4002 // fields in it for the varargs parameters. Code later in this function
4003 // bitcasts the function pointer to the type derived from CallInfo.
4004 //
4005 // In other cases, we assert that the types match up (until pointers stop
4006 // having pointee types).
James Y Knightcfe8cd72019-02-07 01:15:41 +00004007 llvm::Type *TypeFromVal;
4008 if (Callee.isVirtual())
4009 TypeFromVal = Callee.getVirtualFunctionType();
4010 else
4011 TypeFromVal =
4012 Callee.getFunctionPointer()->getType()->getPointerElementType();
4013 assert(IRFuncTy == TypeFromVal);
James Y Knightb92d2902019-02-05 16:05:50 +00004014 }
4015#endif
4016
John McCallb92ab1a2016-10-26 23:46:34 +00004017 // 1. Set up the arguments.
Mike Stump11289f42009-09-09 15:08:12 +00004018
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004019 // If we're using inalloca, insert the allocation after the stack save.
4020 // FIXME: Do this earlier rather than hacking it in here!
John McCall7f416cc2015-09-08 08:05:57 +00004021 Address ArgMemory = Address::invalid();
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004022 if (llvm::StructType *ArgStruct = CallInfo.getArgStruct()) {
Matt Arsenault502ad602017-04-10 22:28:02 +00004023 const llvm::DataLayout &DL = CGM.getDataLayout();
Reid Kleckner9df1d972014-04-10 01:40:15 +00004024 llvm::Instruction *IP = CallArgs.getStackBase();
4025 llvm::AllocaInst *AI;
4026 if (IP) {
4027 IP = IP->getNextNode();
Matt Arsenault502ad602017-04-10 22:28:02 +00004028 AI = new llvm::AllocaInst(ArgStruct, DL.getAllocaAddrSpace(),
4029 "argmem", IP);
Reid Kleckner9df1d972014-04-10 01:40:15 +00004030 } else {
Reid Kleckner966abe72014-05-15 23:01:46 +00004031 AI = CreateTempAlloca(ArgStruct, "argmem");
Reid Kleckner9df1d972014-04-10 01:40:15 +00004032 }
John McCall7f416cc2015-09-08 08:05:57 +00004033 auto Align = CallInfo.getArgStructAlignment();
Guillaume Chateletc79099e2019-10-03 13:00:29 +00004034 AI->setAlignment(Align.getAsAlign());
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004035 AI->setUsedWithInAlloca(true);
4036 assert(AI->isUsedWithInAlloca() && !AI->isStaticAlloca());
John McCall7f416cc2015-09-08 08:05:57 +00004037 ArgMemory = Address(AI, Align);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004038 }
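  // Rough sketch (hypothetical struct type, 32-bit Windows assumed) of the IR
  // this block produces for an inalloca call site:
  //
  //   %argmem = alloca inalloca <{ %struct.Foo }>, align 4
  //   call void @takes_foo(<{ %struct.Foo }>* inalloca %argmem)
  //
  // The alloca is emitted after the stack save recorded in CallArgs so that the
  // matching stack restore can free the argument memory after the call.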
4039
Alexey Samsonov153004f2014-09-29 22:08:00 +00004040 ClangToLLVMArgMapping IRFunctionArgs(CGM.getContext(), CallInfo);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004041 SmallVector<llvm::Value *, 16> IRCallArgs(IRFunctionArgs.totalIRArgs());
4042
Chris Lattner4ca97c32009-06-13 00:26:38 +00004043 // If the call returns a temporary with struct return, create a temporary
Anders Carlsson17490832009-12-24 20:40:36 +00004044 // alloca to hold the result, unless one is given to us.
John McCall7f416cc2015-09-08 08:05:57 +00004045 Address SRetPtr = Address::invalid();
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004046 Address SRetAlloca = Address::invalid();
George Burgess IV003be7c2018-03-08 05:32:30 +00004047 llvm::Value *UnusedReturnSizePtr = nullptr;
John McCallf26e73d2016-03-11 04:30:43 +00004048 if (RetAI.isIndirect() || RetAI.isInAlloca() || RetAI.isCoerceAndExpand()) {
John McCall7f416cc2015-09-08 08:05:57 +00004049 if (!ReturnValue.isNull()) {
4050 SRetPtr = ReturnValue.getValue();
4051 } else {
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004052 SRetPtr = CreateMemTemp(RetTy, "tmp", &SRetAlloca);
Leny Kholodov6aab1112015-06-08 10:23:49 +00004053 if (HaveInsertPoint() && ReturnValue.isUnused()) {
4054 uint64_t size =
4055 CGM.getDataLayout().getTypeAllocSize(ConvertTypeForMem(RetTy));
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004056 UnusedReturnSizePtr = EmitLifetimeStart(size, SRetAlloca.getPointer());
Leny Kholodov6aab1112015-06-08 10:23:49 +00004057 }
4058 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004059 if (IRFunctionArgs.hasSRetArg()) {
John McCall7f416cc2015-09-08 08:05:57 +00004060 IRCallArgs[IRFunctionArgs.getSRetArgNo()] = SRetPtr.getPointer();
John McCallf26e73d2016-03-11 04:30:43 +00004061 } else if (RetAI.isInAlloca()) {
James Y Knight751fe282019-02-09 22:22:28 +00004062 Address Addr =
4063 Builder.CreateStructGEP(ArgMemory, RetAI.getInAllocaFieldIndex());
John McCall7f416cc2015-09-08 08:05:57 +00004064 Builder.CreateStore(SRetPtr.getPointer(), Addr);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004065 }
Anders Carlsson17490832009-12-24 20:40:36 +00004066 }
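  // For example (sketch only; exact IR depends on the target ABI), a call
  // returning a large struct by value typically lowers to something like:
  //
  //   %tmp = alloca %struct.Big
  //   call void @returns_big(%struct.Big* sret %tmp)
  //
  // where %tmp is either the caller-provided ReturnValue slot or the temporary
  // created just above.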
Mike Stump11289f42009-09-09 15:08:12 +00004067
John McCall12f23522016-04-04 18:33:08 +00004068 Address swiftErrorTemp = Address::invalid();
4069 Address swiftErrorArg = Address::invalid();
4070
Francis Visoiu Mistrih143f6b82019-10-08 22:10:38 +00004071 // When passing arguments using temporary allocas, we need to add the
4072 // appropriate lifetime markers. This vector keeps track of all the lifetime
4073 // markers that need to be ended right after the call.
4074 SmallVector<CallLifetimeEnd, 2> CallLifetimeEndAfterCall;
4075
John McCallb92ab1a2016-10-26 23:46:34 +00004076 // Translate all of the arguments as necessary to match the IR lowering.
Daniel Dunbara45bdbb2009-02-04 21:17:21 +00004077 assert(CallInfo.arg_size() == CallArgs.size() &&
4078 "Mismatch between function signature & arguments.");
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004079 unsigned ArgNo = 0;
Daniel Dunbarb52d0772009-02-03 05:59:18 +00004080 CGFunctionInfo::const_arg_iterator info_it = CallInfo.arg_begin();
Mike Stump11289f42009-09-09 15:08:12 +00004081 for (CallArgList::const_iterator I = CallArgs.begin(), E = CallArgs.end();
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004082 I != E; ++I, ++info_it, ++ArgNo) {
Daniel Dunbarb52d0772009-02-03 05:59:18 +00004083 const ABIArgInfo &ArgInfo = info_it->info;
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00004084
Rafael Espindolafad28de2012-10-24 01:59:00 +00004085 // Insert a padding argument to ensure proper alignment.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004086 if (IRFunctionArgs.hasPaddingArg(ArgNo))
4087 IRCallArgs[IRFunctionArgs.getPaddingArgNo(ArgNo)] =
4088 llvm::UndefValue::get(ArgInfo.getPaddingType());
4089
4090 unsigned FirstIRArg, NumIRArgs;
4091 std::tie(FirstIRArg, NumIRArgs) = IRFunctionArgs.getIRArgs(ArgNo);
Rafael Espindolafad28de2012-10-24 01:59:00 +00004092
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00004093 switch (ArgInfo.getKind()) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004094 case ABIArgInfo::InAlloca: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004095 assert(NumIRArgs == 0);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004096 assert(getTarget().getTriple().getArch() == llvm::Triple::x86);
Yaxun Liu5b330e82018-03-15 15:25:19 +00004097 if (I->isAggregate()) {
Yaxun Liu5b330e82018-03-15 15:25:19 +00004098 Address Addr = I->hasLValue()
Akira Hatanakaf139ae32019-12-03 15:17:01 -08004099 ? I->getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00004100 : I->getKnownRValue().getAggregateAddress();
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004101 llvm::Instruction *Placeholder =
Yaxun Liu5b330e82018-03-15 15:25:19 +00004102 cast<llvm::Instruction>(Addr.getPointer());
Reid Kleckner2c6a3892020-02-11 16:03:26 -08004103
4104 if (!ArgInfo.getInAllocaIndirect()) {
4105 // Replace the placeholder with the appropriate argument slot GEP.
4106 CGBuilderTy::InsertPoint IP = Builder.saveIP();
4107 Builder.SetInsertPoint(Placeholder);
4108 Addr = Builder.CreateStructGEP(ArgMemory,
4109 ArgInfo.getInAllocaFieldIndex());
4110 Builder.restoreIP(IP);
4111 } else {
4112 // For indirect things such as overaligned structs, replace the
4113 // placeholder with a regular aggregate temporary alloca. Store the
4114 // address of this alloca into the struct.
4115 Addr = CreateMemTemp(info_it->type, "inalloca.indirect.tmp");
4116 Address ArgSlot = Builder.CreateStructGEP(
4117 ArgMemory, ArgInfo.getInAllocaFieldIndex());
4118 Builder.CreateStore(Addr.getPointer(), ArgSlot);
4119 }
Hans Wennborgeaabaf72020-01-28 22:17:31 +01004120 deferPlaceholderReplacement(Placeholder, Addr.getPointer());
Reid Kleckner2c6a3892020-02-11 16:03:26 -08004121 } else if (ArgInfo.getInAllocaIndirect()) {
4122 // Make a temporary alloca and store the address of it into the argument
4123 // struct.
4124 Address Addr = CreateMemTempWithoutCast(
4125 I->Ty, getContext().getTypeAlignInChars(I->Ty),
4126 "indirect-arg-temp");
4127 I->copyInto(*this, Addr);
4128 Address ArgSlot =
4129 Builder.CreateStructGEP(ArgMemory, ArgInfo.getInAllocaFieldIndex());
4130 Builder.CreateStore(Addr.getPointer(), ArgSlot);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004131 } else {
4132 // Store the RValue into the argument struct.
James Y Knight751fe282019-02-09 22:22:28 +00004133 Address Addr =
4134 Builder.CreateStructGEP(ArgMemory, ArgInfo.getInAllocaFieldIndex());
John McCall7f416cc2015-09-08 08:05:57 +00004135 unsigned AS = Addr.getType()->getPointerAddressSpace();
David Majnemer32b57b02014-03-31 16:12:47 +00004136 llvm::Type *MemType = ConvertTypeForMem(I->Ty)->getPointerTo(AS);
4137 // There are some cases where a trivial bitcast is not avoidable. The
4138 // definition of a type later in a translation unit may change its type
4139 // from {}* to (%struct.foo*)*.
John McCall7f416cc2015-09-08 08:05:57 +00004140 if (Addr.getType() != MemType)
David Majnemer32b57b02014-03-31 16:12:47 +00004141 Addr = Builder.CreateBitCast(Addr, MemType);
Yaxun Liu5b330e82018-03-15 15:25:19 +00004142 I->copyInto(*this, Addr);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004143 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004144 break;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004145 }
4146
Daniel Dunbar03816342010-08-21 02:24:36 +00004147 case ABIArgInfo::Indirect: {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004148 assert(NumIRArgs == 1);
Yaxun Liu5b330e82018-03-15 15:25:19 +00004149 if (!I->isAggregate()) {
Daniel Dunbar747865a2009-02-05 09:16:39 +00004150 // Make a temporary alloca to pass the argument.
Yaxun Liuaefdb8e2018-06-15 15:33:22 +00004151 Address Addr = CreateMemTempWithoutCast(
4152 I->Ty, ArgInfo.getIndirectAlign(), "indirect-arg-temp");
John McCall7f416cc2015-09-08 08:05:57 +00004153 IRCallArgs[FirstIRArg] = Addr.getPointer();
John McCall47fb9502013-03-07 21:37:08 +00004154
Yaxun Liu5b330e82018-03-15 15:25:19 +00004155 I->copyInto(*this, Addr);
Daniel Dunbar747865a2009-02-05 09:16:39 +00004156 } else {
Eli Friedmaneb7fab62011-06-14 01:37:52 +00004157 // We want to avoid creating an unnecessary temporary+copy here;
Guy Benyei3832bfd2013-03-10 12:59:00 +00004158 // however, we need one in three cases:
Eli Friedmaneb7fab62011-06-14 01:37:52 +00004159 // 1. If the argument is not byval, and we are required to copy the
4160 // source. (This case doesn't occur on any common architecture.)
4161 // 2. If the argument is byval, RV is not sufficiently aligned, and
4162 // we cannot force it to be sufficiently aligned.
Yaxun Liu5b330e82018-03-15 15:25:19 +00004163 // 3. If the argument is byval, but RV is not located in default
4164 // or alloca address space.
4165 Address Addr = I->hasLValue()
Akira Hatanakaf139ae32019-12-03 15:17:01 -08004166 ? I->getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00004167 : I->getKnownRValue().getAggregateAddress();
4168 llvm::Value *V = Addr.getPointer();
John McCall7f416cc2015-09-08 08:05:57 +00004169 CharUnits Align = ArgInfo.getIndirectAlign();
Micah Villmowdd31ca12012-10-08 16:25:52 +00004170 const llvm::DataLayout *TD = &CGM.getDataLayout();
Yaxun Liu5b330e82018-03-15 15:25:19 +00004171
4172 assert((FirstIRArg >= IRFuncTy->getNumParams() ||
4173 IRFuncTy->getParamType(FirstIRArg)->getPointerAddressSpace() ==
4174 TD->getAllocaAddrSpace()) &&
4175 "indirect argument must be in alloca address space");
4176
4177 bool NeedCopy = false;
4178
4179 if (Addr.getAlignment() < Align &&
Craig Topper68b2e502020-04-20 20:32:05 -07004180 llvm::getOrEnforceKnownAlignment(V, Align.getAsAlign(), *TD) <
4181 Align.getAsAlign()) {
Yaxun Liu5b330e82018-03-15 15:25:19 +00004182 NeedCopy = true;
4183 } else if (I->hasLValue()) {
4184 auto LV = I->getKnownLValue();
4185 auto AS = LV.getAddressSpace();
Andrew Savonichev1bf1a152018-12-10 12:03:00 +00004186
Yaxun (Sam) Liu21b43882019-12-10 11:55:33 -05004187 if (!ArgInfo.getIndirectByVal() ||
4188 (LV.getAlignment() < getContext().getTypeAlignInChars(I->Ty))) {
Andrew Savonichev1bf1a152018-12-10 12:03:00 +00004189 NeedCopy = true;
4190 }
4191 if (!getLangOpts().OpenCL) {
4192 if ((ArgInfo.getIndirectByVal() &&
4193 (AS != LangAS::Default &&
4194 AS != CGM.getASTAllocaAddressSpace()))) {
4195 NeedCopy = true;
4196 }
4197 }
4198 // For OpenCL even if RV is located in default or alloca address space
4199 // we don't want to perform address space cast for it.
4200 else if ((ArgInfo.getIndirectByVal() &&
4201 Addr.getType()->getAddressSpace() != IRFuncTy->
4202 getParamType(FirstIRArg)->getPointerAddressSpace())) {
Yaxun Liu5b330e82018-03-15 15:25:19 +00004203 NeedCopy = true;
4204 }
4205 }
Andrew Savonichev1bf1a152018-12-10 12:03:00 +00004206
Yaxun Liu5b330e82018-03-15 15:25:19 +00004207 if (NeedCopy) {
Eli Friedmaneb7fab62011-06-14 01:37:52 +00004208 // Create an aligned temporary, and copy to it.
Yaxun Liuaefdb8e2018-06-15 15:33:22 +00004209 Address AI = CreateMemTempWithoutCast(
4210 I->Ty, ArgInfo.getIndirectAlign(), "byval-temp");
John McCall7f416cc2015-09-08 08:05:57 +00004211 IRCallArgs[FirstIRArg] = AI.getPointer();
Francis Visoiu Mistrih143f6b82019-10-08 22:10:38 +00004212
4213 // Emit lifetime markers for the temporary alloca.
4214 uint64_t ByvalTempElementSize =
4215 CGM.getDataLayout().getTypeAllocSize(AI.getElementType());
4216 llvm::Value *LifetimeSize =
4217 EmitLifetimeStart(ByvalTempElementSize, AI.getPointer());
4218
4219 // Add cleanup code to emit the end lifetime marker after the call.
4220 if (LifetimeSize) // In case we disabled lifetime markers.
4221 CallLifetimeEndAfterCall.emplace_back(AI, LifetimeSize);
4222
4223 // Generate the copy.
Yaxun Liu5b330e82018-03-15 15:25:19 +00004224 I->copyInto(*this, AI);
Eli Friedmaneb7fab62011-06-14 01:37:52 +00004225 } else {
4226 // Skip the extra memcpy call.
Yaxun Liu5b330e82018-03-15 15:25:19 +00004227 auto *T = V->getType()->getPointerElementType()->getPointerTo(
4228 CGM.getDataLayout().getAllocaAddrSpace());
4229 IRCallArgs[FirstIRArg] = getTargetHooks().performAddrSpaceCast(
4230 *this, V, LangAS::Default, CGM.getASTAllocaAddressSpace(), T,
4231 true);
Eli Friedmaneb7fab62011-06-14 01:37:52 +00004232 }
Daniel Dunbar747865a2009-02-05 09:16:39 +00004233 }
4234 break;
Daniel Dunbar03816342010-08-21 02:24:36 +00004235 }
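    // A hedged example of the alignment case above (names made up): passing an
    // underaligned lvalue to a byval parameter can force the extra copy, e.g.
    //
    //   struct __attribute__((packed)) P { char c; long long x; };
    //   void callee(struct P p);                    // passed byval
    //   void caller(struct P *p) { callee(*p); }    // may need a "byval-temp"
    //
    // since the packed source may not satisfy the indirect alignment required
    // by the ABI.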
Daniel Dunbar747865a2009-02-05 09:16:39 +00004236
Daniel Dunbar94a6f252009-01-26 21:26:08 +00004237 case ABIArgInfo::Ignore:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004238 assert(NumIRArgs == 0);
Daniel Dunbar94a6f252009-01-26 21:26:08 +00004239 break;
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004240
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004241 case ABIArgInfo::Extend:
4242 case ABIArgInfo::Direct: {
4243 if (!isa<llvm::StructType>(ArgInfo.getCoerceToType()) &&
Chris Lattner8a2f3c72010-07-30 04:02:24 +00004244 ArgInfo.getCoerceToType() == ConvertType(info_it->type) &&
4245 ArgInfo.getDirectOffset() == 0) {
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004246 assert(NumIRArgs == 1);
Chris Lattnerbb1952c2011-07-12 04:46:18 +00004247 llvm::Value *V;
Yaxun Liu5b330e82018-03-15 15:25:19 +00004248 if (!I->isAggregate())
4249 V = I->getKnownRValue().getScalarVal();
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004250 else
Yaxun Liu5b330e82018-03-15 15:25:19 +00004251 V = Builder.CreateLoad(
Akira Hatanakaf139ae32019-12-03 15:17:01 -08004252 I->hasLValue() ? I->getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00004253 : I->getKnownRValue().getAggregateAddress());
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004254
John McCall12f23522016-04-04 18:33:08 +00004255 // Implement swifterror by copying into a new swifterror argument.
4256 // We'll write back in the normal path out of the call.
4257 if (CallInfo.getExtParameterInfo(ArgNo).getABI()
4258 == ParameterABI::SwiftErrorResult) {
4259 assert(!swiftErrorTemp.isValid() && "multiple swifterror args");
4260
4261 QualType pointeeTy = I->Ty->getPointeeType();
4262 swiftErrorArg =
4263 Address(V, getContext().getTypeAlignInChars(pointeeTy));
4264
4265 swiftErrorTemp =
4266 CreateMemTemp(pointeeTy, getPointerAlign(), "swifterror.temp");
4267 V = swiftErrorTemp.getPointer();
4268 cast<llvm::AllocaInst>(V)->setSwiftError(true);
4269
4270 llvm::Value *errorValue = Builder.CreateLoad(swiftErrorArg);
4271 Builder.CreateStore(errorValue, swiftErrorTemp);
4272 }
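      // Sketch of the copy-in/copy-out implemented above (hypothetical
      // declaration; the parameter attribute spelling is an assumption):
      //
      //   void f(Error **__attribute__((swift_error_result)) err);
      //
      // The caller's error slot is loaded into a fresh swifterror alloca before
      // the call, and written back after the call returns (see the writeback
      // near the end of this function).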
4273
Reid Kleckner79b0fd72014-10-10 00:05:45 +00004274 // We might have to widen integers, but we should never truncate.
4275 if (ArgInfo.getCoerceToType() != V->getType() &&
4276 V->getType()->isIntegerTy())
4277 V = Builder.CreateZExt(V, ArgInfo.getCoerceToType());
4278
Chris Lattner3ce86682011-07-12 04:53:39 +00004279 // If the argument doesn't match, perform a bitcast to coerce it. This
4280 // can happen due to trivial type mismatches.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004281 if (FirstIRArg < IRFuncTy->getNumParams() &&
4282 V->getType() != IRFuncTy->getParamType(FirstIRArg))
4283 V = Builder.CreateBitCast(V, IRFuncTy->getParamType(FirstIRArg));
John McCall12f23522016-04-04 18:33:08 +00004284
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004285 IRCallArgs[FirstIRArg] = V;
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004286 break;
4287 }
Daniel Dunbar94a6f252009-01-26 21:26:08 +00004288
Daniel Dunbar2f219b02009-02-03 19:12:28 +00004289 // FIXME: Avoid the conversion through memory if possible.
John McCall7f416cc2015-09-08 08:05:57 +00004290 Address Src = Address::invalid();
Yaxun Liu5b330e82018-03-15 15:25:19 +00004291 if (!I->isAggregate()) {
John McCall7f416cc2015-09-08 08:05:57 +00004292 Src = CreateMemTemp(I->Ty, "coerce");
Yaxun Liu5b330e82018-03-15 15:25:19 +00004293 I->copyInto(*this, Src);
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00004294 } else {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08004295 Src = I->hasLValue() ? I->getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00004296 : I->getKnownRValue().getAggregateAddress();
Ulrich Weigand6e2cea62015-07-10 11:31:43 +00004297 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004298
Chris Lattner8a2f3c72010-07-30 04:02:24 +00004299 // If the value is offset in memory, apply the offset now.
John McCall7f416cc2015-09-08 08:05:57 +00004300 Src = emitAddressAtOffset(*this, Src, ArgInfo);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004301
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00004302 // Fast-isel and the optimizer generally like scalar values better than
4303 // FCAs, so we flatten them if this is safe to do for this argument.
James Molloy6f244b62014-05-09 16:21:39 +00004304 llvm::StructType *STy =
4305 dyn_cast<llvm::StructType>(ArgInfo.getCoerceToType());
Oliver Stannard2bfdc5b2014-08-27 10:43:15 +00004306 if (STy && ArgInfo.isDirect() && ArgInfo.getCanBeFlattened()) {
Eli Friedman83fa8112020-04-03 15:11:40 -07004307 llvm::Type *SrcTy = Src.getElementType();
Chandler Carrutha6399a52012-10-10 11:29:08 +00004308 uint64_t SrcSize = CGM.getDataLayout().getTypeAllocSize(SrcTy);
4309 uint64_t DstSize = CGM.getDataLayout().getTypeAllocSize(STy);
4310
4311 // If the source type is smaller than the destination type of the
4312 // coerce-to logic, copy the source value into a temp alloca the size
4313 // of the destination type to allow loading all of it. The bits past
4314 // the source value are left undef.
4315 if (SrcSize < DstSize) {
John McCall7f416cc2015-09-08 08:05:57 +00004316 Address TempAlloca
4317 = CreateTempAlloca(STy, Src.getAlignment(),
4318 Src.getName() + ".coerce");
4319 Builder.CreateMemCpy(TempAlloca, Src, SrcSize);
4320 Src = TempAlloca;
Chandler Carrutha6399a52012-10-10 11:29:08 +00004321 } else {
Matt Arsenault7a124f32017-08-01 20:36:57 +00004322 Src = Builder.CreateBitCast(Src,
4323 STy->getPointerTo(Src.getAddressSpace()));
Chandler Carrutha6399a52012-10-10 11:29:08 +00004324 }
4325
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004326 assert(NumIRArgs == STy->getNumElements());
Chris Lattnerceddafb2010-07-05 20:41:41 +00004327 for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
James Y Knight751fe282019-02-09 22:22:28 +00004328 Address EltPtr = Builder.CreateStructGEP(Src, i);
John McCall7f416cc2015-09-08 08:05:57 +00004329 llvm::Value *LI = Builder.CreateLoad(EltPtr);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004330 IRCallArgs[FirstIRArg + i] = LI;
Chris Lattner15ec3612010-06-29 00:06:42 +00004331 }
Chris Lattner3dd716c2010-06-28 23:44:11 +00004332 } else {
Chris Lattner15ec3612010-06-29 00:06:42 +00004333 // In the simple case, just pass the coerced loaded value.
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004334 assert(NumIRArgs == 1);
4335 IRCallArgs[FirstIRArg] =
John McCall7f416cc2015-09-08 08:05:57 +00004336 CreateCoercedLoad(Src, ArgInfo.getCoerceToType(), *this);
Chris Lattner3dd716c2010-06-28 23:44:11 +00004337 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004338
Daniel Dunbar2f219b02009-02-03 19:12:28 +00004339 break;
4340 }
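    // Illustrative sketch of the flattening above (x86-64 SysV assumed, struct
    // name made up): a struct coerced to { double, double } is not passed as a
    // single first-class aggregate; each element is loaded and passed as its
    // own IR argument:
    //
    //   struct Pt { double x, y; };
    //   void callee(struct Pt p);   // IR: declare void @callee(double, double)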
4341
John McCallf26e73d2016-03-11 04:30:43 +00004342 case ABIArgInfo::CoerceAndExpand: {
John McCallf26e73d2016-03-11 04:30:43 +00004343 auto coercionType = ArgInfo.getCoerceAndExpandType();
4344 auto layout = CGM.getDataLayout().getStructLayout(coercionType);
4345
John McCall12f23522016-04-04 18:33:08 +00004346 llvm::Value *tempSize = nullptr;
4347 Address addr = Address::invalid();
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004348 Address AllocaAddr = Address::invalid();
Yaxun Liu5b330e82018-03-15 15:25:19 +00004349 if (I->isAggregate()) {
Akira Hatanakaf139ae32019-12-03 15:17:01 -08004350 addr = I->hasLValue() ? I->getKnownLValue().getAddress(*this)
Yaxun Liu5b330e82018-03-15 15:25:19 +00004351 : I->getKnownRValue().getAggregateAddress();
4352
John McCall12f23522016-04-04 18:33:08 +00004353 } else {
Yaxun Liu5b330e82018-03-15 15:25:19 +00004354 RValue RV = I->getKnownRValue();
John McCall12f23522016-04-04 18:33:08 +00004355 assert(RV.isScalar()); // complex should always just be direct
4356
4357 llvm::Type *scalarType = RV.getScalarVal()->getType();
4358 auto scalarSize = CGM.getDataLayout().getTypeAllocSize(scalarType);
4359 auto scalarAlign = CGM.getDataLayout().getPrefTypeAlignment(scalarType);
4360
John McCall12f23522016-04-04 18:33:08 +00004361 // Materialize to a temporary.
Guillaume Chateletf7caf3c2019-09-23 09:04:12 +00004362 addr = CreateTempAlloca(
4363 RV.getScalarVal()->getType(),
4364 CharUnits::fromQuantity(std::max(
4365 (unsigned)layout->getAlignment().value(), scalarAlign)),
4366 "tmp",
4367 /*ArraySize=*/nullptr, &AllocaAddr);
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004368 tempSize = EmitLifetimeStart(scalarSize, AllocaAddr.getPointer());
John McCall12f23522016-04-04 18:33:08 +00004369
4370 Builder.CreateStore(RV.getScalarVal(), addr);
4371 }
4372
John McCallf26e73d2016-03-11 04:30:43 +00004373 addr = Builder.CreateElementBitCast(addr, coercionType);
4374
4375 unsigned IRArgPos = FirstIRArg;
4376 for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
4377 llvm::Type *eltType = coercionType->getElementType(i);
4378 if (ABIArgInfo::isPaddingForCoerceAndExpand(eltType)) continue;
James Y Knight751fe282019-02-09 22:22:28 +00004379 Address eltAddr = Builder.CreateStructGEP(addr, i);
John McCallf26e73d2016-03-11 04:30:43 +00004380 llvm::Value *elt = Builder.CreateLoad(eltAddr);
4381 IRCallArgs[IRArgPos++] = elt;
4382 }
4383 assert(IRArgPos == FirstIRArg + NumIRArgs);
4384
John McCall12f23522016-04-04 18:33:08 +00004385 if (tempSize) {
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004386 EmitLifetimeEnd(tempSize, AllocaAddr.getPointer());
John McCall12f23522016-04-04 18:33:08 +00004387 }
4388
John McCallf26e73d2016-03-11 04:30:43 +00004389 break;
4390 }
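    // CoerceAndExpand is used by, e.g., the Swift calling convention: the
    // argument is laid out as a coercion struct and every non-padding element
    // becomes its own IR argument. A rough sketch, assuming a coercion type of
    // { i64, i64 }:
    //
    //   %elt0 = load i64, i64* <gep into coerced addr, element 0>
    //   %elt1 = load i64, i64* <gep into coerced addr, element 1>
    //   call void @callee(i64 %elt0, i64 %elt1, ...)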
4391
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00004392 case ABIArgInfo::Expand:
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004393 unsigned IRArgPos = FirstIRArg;
Yaxun Liu5b330e82018-03-15 15:25:19 +00004394 ExpandTypeToArgs(I->Ty, *I, IRFuncTy, IRCallArgs, IRArgPos);
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004395 assert(IRArgPos == FirstIRArg + NumIRArgs);
Daniel Dunbar8fc81b02008-09-17 00:51:38 +00004396 break;
Daniel Dunbar613855c2008-09-09 23:27:19 +00004397 }
4398 }
Mike Stump11289f42009-09-09 15:08:12 +00004399
John McCall9831b842018-02-06 18:52:44 +00004400 const CGCallee &ConcreteCallee = Callee.prepareConcreteCallee(*this);
4401 llvm::Value *CalleePtr = ConcreteCallee.getFunctionPointer();
John McCallb92ab1a2016-10-26 23:46:34 +00004402
4403 // If we're using inalloca, set up that argument.
John McCall7f416cc2015-09-08 08:05:57 +00004404 if (ArgMemory.isValid()) {
4405 llvm::Value *Arg = ArgMemory.getPointer();
Reid Klecknerafba553e2014-07-08 02:24:27 +00004406 if (CallInfo.isVariadic()) {
4407 // When passing non-POD arguments by value to variadic functions, we will
4408 // end up with a variadic prototype and an inalloca call site. In such
4409 // cases, we can't do any parameter mismatch checks. Give up and bitcast
4410 // the callee.
John McCallb92ab1a2016-10-26 23:46:34 +00004411 unsigned CalleeAS = CalleePtr->getType()->getPointerAddressSpace();
James Y Knightcfe8cd72019-02-07 01:15:41 +00004412 CalleePtr =
4413 Builder.CreateBitCast(CalleePtr, IRFuncTy->getPointerTo(CalleeAS));
Reid Klecknerafba553e2014-07-08 02:24:27 +00004414 } else {
4415 llvm::Type *LastParamTy =
4416 IRFuncTy->getParamType(IRFuncTy->getNumParams() - 1);
4417 if (Arg->getType() != LastParamTy) {
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004418#ifndef NDEBUG
Reid Klecknerafba553e2014-07-08 02:24:27 +00004419 // Assert that these structs have equivalent element types.
4420 llvm::StructType *FullTy = CallInfo.getArgStruct();
4421 llvm::StructType *DeclaredTy = cast<llvm::StructType>(
4422 cast<llvm::PointerType>(LastParamTy)->getElementType());
4423 assert(DeclaredTy->getNumElements() == FullTy->getNumElements());
4424 for (llvm::StructType::element_iterator DI = DeclaredTy->element_begin(),
4425 DE = DeclaredTy->element_end(),
4426 FI = FullTy->element_begin();
4427 DI != DE; ++DI, ++FI)
4428 assert(*DI == *FI);
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004429#endif
Reid Klecknerafba553e2014-07-08 02:24:27 +00004430 Arg = Builder.CreateBitCast(Arg, LastParamTy);
4431 }
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004432 }
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004433 assert(IRFunctionArgs.hasInallocaArg());
4434 IRCallArgs[IRFunctionArgs.getInallocaArgNo()] = Arg;
Reid Kleckner314ef7b2014-02-01 00:04:45 +00004435 }
4436
John McCallb92ab1a2016-10-26 23:46:34 +00004437 // 2. Prepare the function pointer.
4438
4439 // If the callee is a bitcast of a non-variadic function to have a
4440 // variadic function pointer type, check to see if we can remove the
4441 // bitcast. This comes up with unprototyped functions.
4442 //
4443 // This makes the IR nicer, but more importantly it ensures that we
4444 // can inline the function at -O0 if it is marked always_inline.
James Y Knightcfe8cd72019-02-07 01:15:41 +00004445 auto simplifyVariadicCallee = [](llvm::FunctionType *CalleeFT,
4446 llvm::Value *Ptr) -> llvm::Function * {
John McCallb92ab1a2016-10-26 23:46:34 +00004447 if (!CalleeFT->isVarArg())
James Y Knightcfe8cd72019-02-07 01:15:41 +00004448 return nullptr;
John McCallb92ab1a2016-10-26 23:46:34 +00004449
James Y Knightcfe8cd72019-02-07 01:15:41 +00004450 // Get underlying value if it's a bitcast
4451 if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(Ptr)) {
4452 if (CE->getOpcode() == llvm::Instruction::BitCast)
4453 Ptr = CE->getOperand(0);
4454 }
John McCallb92ab1a2016-10-26 23:46:34 +00004455
James Y Knightcfe8cd72019-02-07 01:15:41 +00004456 llvm::Function *OrigFn = dyn_cast<llvm::Function>(Ptr);
John McCallb92ab1a2016-10-26 23:46:34 +00004457 if (!OrigFn)
James Y Knightcfe8cd72019-02-07 01:15:41 +00004458 return nullptr;
John McCallb92ab1a2016-10-26 23:46:34 +00004459
4460 llvm::FunctionType *OrigFT = OrigFn->getFunctionType();
4461
4462 // If the original type is variadic, or if any of the component types
4463 // disagree, we cannot remove the cast.
4464 if (OrigFT->isVarArg() ||
4465 OrigFT->getNumParams() != CalleeFT->getNumParams() ||
4466 OrigFT->getReturnType() != CalleeFT->getReturnType())
James Y Knightcfe8cd72019-02-07 01:15:41 +00004467 return nullptr;
John McCallb92ab1a2016-10-26 23:46:34 +00004468
4469 for (unsigned i = 0, e = OrigFT->getNumParams(); i != e; ++i)
4470 if (OrigFT->getParamType(i) != CalleeFT->getParamType(i))
James Y Knightcfe8cd72019-02-07 01:15:41 +00004471 return nullptr;
John McCallb92ab1a2016-10-26 23:46:34 +00004472
4473 return OrigFn;
4474 };
James Y Knightcfe8cd72019-02-07 01:15:41 +00004475
4476 if (llvm::Function *OrigFn = simplifyVariadicCallee(IRFuncTy, CalleePtr)) {
4477 CalleePtr = OrigFn;
4478 IRFuncTy = OrigFn->getFunctionType();
4479 }
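  // A hedged illustration of the unprototyped-function case mentioned above:
  //
  //   int f();                  // K&R-style declaration, no prototype
  //   int f(void) { return 1; } // definition is non-variadic
  //   int g(void) { return f(); }
  //
  // Depending on emission order, the call in g may be expressed as a call
  // through a bitcast of @f to a variadic function pointer type; stripping
  // that bitcast keeps the call inlinable at -O0 for always_inline callees.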
John McCallb92ab1a2016-10-26 23:46:34 +00004480
4481 // 3. Perform the actual call.
4482
4483 // Deactivate any cleanups that we're supposed to do immediately before
4484 // the call.
Reid Kleckner23f4c4b2013-06-21 12:45:15 +00004485 if (!CallArgs.getCleanupsToDeactivate().empty())
4486 deactivateArgCleanupsBeforeCall(*this, CallArgs);
4487
John McCallb92ab1a2016-10-26 23:46:34 +00004488 // Assert that the arguments we computed match up. The IR verifier
4489 // will catch this, but this is a common enough source of problems
4490 // during IRGen changes that it's way better for debugging to catch
4491 // it ourselves here.
4492#ifndef NDEBUG
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004493 assert(IRCallArgs.size() == IRFuncTy->getNumParams() || IRFuncTy->isVarArg());
4494 for (unsigned i = 0; i < IRCallArgs.size(); ++i) {
4495 // Inalloca argument can have different type.
4496 if (IRFunctionArgs.hasInallocaArg() &&
4497 i == IRFunctionArgs.getInallocaArgNo())
4498 continue;
4499 if (i < IRFuncTy->getNumParams())
4500 assert(IRCallArgs[i]->getType() == IRFuncTy->getParamType(i));
4501 }
John McCallb92ab1a2016-10-26 23:46:34 +00004502#endif
Alexey Samsonov91cf4552014-08-22 01:06:06 +00004503
Craig Topper3113ec32018-10-24 17:42:17 +00004504 // Update the largest vector width if any arguments have vector types.
4505 for (unsigned i = 0; i < IRCallArgs.size(); ++i) {
4506 if (auto *VT = dyn_cast<llvm::VectorType>(IRCallArgs[i]->getType()))
Kerry McLaughlinaf649482020-03-17 10:27:29 +00004507 LargestVectorWidth =
4508 std::max((uint64_t)LargestVectorWidth,
4509 VT->getPrimitiveSizeInBits().getKnownMinSize());
Craig Topper3113ec32018-10-24 17:42:17 +00004510 }
4511
John McCallb92ab1a2016-10-26 23:46:34 +00004512 // Compute the calling convention and attributes.
Daniel Dunbar0ef34792009-09-12 00:59:20 +00004513 unsigned CallingConv;
Reid Klecknercdd26792017-04-18 23:50:03 +00004514 llvm::AttributeList Attrs;
John McCallb92ab1a2016-10-26 23:46:34 +00004515 CGM.ConstructAttributeList(CalleePtr->getName(), CallInfo,
Reid Klecknercdd26792017-04-18 23:50:03 +00004516 Callee.getAbstractInfo(), Attrs, CallingConv,
Chad Rosier7dbc9cf2016-01-06 14:35:46 +00004517 /*AttrOnCallSite=*/true);
Mike Stump11289f42009-09-09 15:08:12 +00004518
Melanie Blower7f9b5132019-12-04 12:23:46 -08004519 if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(CurFuncDecl))
4520 if (FD->usesFPIntrin())
4521 // All calls within a strictfp function are marked strictfp
4522 Attrs =
4523 Attrs.addAttribute(getLLVMContext(), llvm::AttributeList::FunctionIndex,
4524 llvm::Attribute::StrictFP);
4525
John McCallb92ab1a2016-10-26 23:46:34 +00004526 // Apply some call-site-specific attributes.
4527 // TODO: work this into building the attribute set.
4528
4529 // Apply always_inline to all calls within flatten functions.
4530 // FIXME: should this really take priority over __try, below?
4531 if (CurCodeDecl && CurCodeDecl->hasAttr<FlattenAttr>() &&
Amy Huang0d0334f2019-04-12 20:25:30 +00004532 !(TargetDecl && TargetDecl->hasAttr<NoInlineAttr>())) {
John McCallb92ab1a2016-10-26 23:46:34 +00004533 Attrs =
Reid Klecknerde864822017-03-21 16:57:30 +00004534 Attrs.addAttribute(getLLVMContext(), llvm::AttributeList::FunctionIndex,
John McCallb92ab1a2016-10-26 23:46:34 +00004535 llvm::Attribute::AlwaysInline);
4536 }
4537
4538 // Disable inlining inside SEH __try blocks.
4539 if (isSEHTryScope()) {
4540 Attrs =
Reid Klecknerde864822017-03-21 16:57:30 +00004541 Attrs.addAttribute(getLLVMContext(), llvm::AttributeList::FunctionIndex,
John McCallb92ab1a2016-10-26 23:46:34 +00004542 llvm::Attribute::NoInline);
4543 }
4544
4545 // Decide whether to use a call or an invoke.
David Majnemer4e52d6f2015-12-12 05:39:21 +00004546 bool CannotThrow;
4547 if (currentFunctionUsesSEHTry()) {
John McCallb92ab1a2016-10-26 23:46:34 +00004548 // SEH cares about asynchronous exceptions, so everything can "throw."
David Majnemer4e52d6f2015-12-12 05:39:21 +00004549 CannotThrow = false;
4550 } else if (isCleanupPadScope() &&
4551 EHPersonality::get(*this).isMSVCXXPersonality()) {
4552 // The MSVC++ personality will implicitly terminate the program if an
John McCallb92ab1a2016-10-26 23:46:34 +00004553 // exception is thrown during a cleanup outside of a try/catch.
4554 // We don't need to model anything in IR to get this behavior.
David Majnemer4e52d6f2015-12-12 05:39:21 +00004555 CannotThrow = true;
4556 } else {
John McCallb92ab1a2016-10-26 23:46:34 +00004557 // Otherwise, nounwind call sites will never throw.
Reid Klecknerde864822017-03-21 16:57:30 +00004558 CannotThrow = Attrs.hasAttribute(llvm::AttributeList::FunctionIndex,
David Majnemer4e52d6f2015-12-12 05:39:21 +00004559 llvm::Attribute::NoUnwind);
4560 }
George Burgess IV003be7c2018-03-08 05:32:30 +00004561
4562 // If we made a temporary, be sure to clean up after ourselves. Note that we
4563 // can't depend on being inside of an ExprWithCleanups, so we need to manually
4564 // pop this cleanup later on. Being eager about this is OK, since this
4565 // temporary is 'invisible' outside of the callee.
4566 if (UnusedReturnSizePtr)
Yaxun Liua2a9cfa2018-05-17 11:16:35 +00004567 pushFullExprCleanup<CallLifetimeEnd>(NormalEHLifetimeMarker, SRetAlloca,
George Burgess IV003be7c2018-03-08 05:32:30 +00004568 UnusedReturnSizePtr);
4569
David Majnemer4e52d6f2015-12-12 05:39:21 +00004570 llvm::BasicBlock *InvokeDest = CannotThrow ? nullptr : getInvokeDest();
John McCallbd309292010-07-06 01:34:17 +00004571
Reid Klecknerb75a3f02018-02-09 00:16:41 +00004572 SmallVector<llvm::OperandBundleDef, 1> BundleList =
4573 getBundlesForFunclet(CalleePtr);
David Majnemer0b17d442015-12-15 21:27:59 +00004574
Roman Lebedeve819f7c2020-01-23 22:50:15 +03004582 AssumeAlignedAttrEmitter AssumeAlignedAttrEmitter(*this, TargetDecl);
4583 Attrs = AssumeAlignedAttrEmitter.TryEmitAsCallSiteAttribute(Attrs);
4584
Roman Lebedev5ffe6402020-01-23 22:50:24 +03004585 AllocAlignAttrEmitter AllocAlignAttrEmitter(*this, TargetDecl, CallArgs);
4586 Attrs = AllocAlignAttrEmitter.TryEmitAsCallSiteAttribute(Attrs);
4587
John McCallb92ab1a2016-10-26 23:46:34 +00004588 // Emit the actual call/invoke instruction.
James Y Knight3933add2019-01-30 02:54:28 +00004589 llvm::CallBase *CI;
John McCallbd309292010-07-06 01:34:17 +00004590 if (!InvokeDest) {
James Y Knightcfe8cd72019-02-07 01:15:41 +00004591 CI = Builder.CreateCall(IRFuncTy, CalleePtr, IRCallArgs, BundleList);
Daniel Dunbar12347492009-02-23 17:26:39 +00004592 } else {
4593 llvm::BasicBlock *Cont = createBasicBlock("invoke.cont");
James Y Knightcfe8cd72019-02-07 01:15:41 +00004594 CI = Builder.CreateInvoke(IRFuncTy, CalleePtr, Cont, InvokeDest, IRCallArgs,
David Majnemer0b17d442015-12-15 21:27:59 +00004595 BundleList);
Daniel Dunbar12347492009-02-23 17:26:39 +00004596 EmitBlock(Cont);
Daniel Dunbar5006f4a2009-02-20 18:54:31 +00004597 }
Chris Lattnere70a0072010-06-29 16:40:28 +00004598 if (callOrInvoke)
John McCallb92ab1a2016-10-26 23:46:34 +00004599 *callOrInvoke = CI;
Daniel Dunbar5006f4a2009-02-20 18:54:31 +00004600
Andrew Paverdbdd88b72020-01-10 11:08:18 +00004601 // If this is within a function that has the guard(nocf) attribute and is an
4602 // indirect call, add the "guard_nocf" attribute to this call to indicate that
4603 // Control Flow Guard checks should not be added, even if the call is inlined.
4604 if (const auto *FD = dyn_cast_or_null<FunctionDecl>(CurFuncDecl)) {
4605 if (const auto *A = FD->getAttr<CFGuardAttr>()) {
4606 if (A->getGuard() == CFGuardAttr::GuardArg::nocf && !CI->getCalledFunction())
4607 Attrs = Attrs.addAttribute(
4608 getLLVMContext(), llvm::AttributeList::FunctionIndex, "guard_nocf");
4609 }
4610 }
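  // Hypothetical source-level trigger for the block above (MSVC-style
  // spelling; names invented):
  //
  //   __declspec(guard(nocf)) void caller(void (*fp)(void)) {
  //     fp();   // this indirect call gets the "guard_nocf" attribute
  //   }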
4611
John McCallb92ab1a2016-10-26 23:46:34 +00004612 // Apply the attributes and calling convention.
James Y Knight3933add2019-01-30 02:54:28 +00004613 CI->setAttributes(Attrs);
4614 CI->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004615
John McCallb92ab1a2016-10-26 23:46:34 +00004616 // Apply various metadata.
4617
4618 if (!CI->getType()->isVoidTy())
4619 CI->setName("call");
4620
Craig Topper3113ec32018-10-24 17:42:17 +00004621 // Update largest vector width from the return type.
4622 if (auto *VT = dyn_cast<llvm::VectorType>(CI->getType()))
Kerry McLaughlinaf649482020-03-17 10:27:29 +00004623 LargestVectorWidth =
4624 std::max((uint64_t)LargestVectorWidth,
4625 VT->getPrimitiveSizeInBits().getKnownMinSize());
Craig Topper3113ec32018-10-24 17:42:17 +00004626
Adam Nemet1e217bc2016-03-28 22:18:53 +00004627 // Insert instrumentation or attach profile metadata at indirect call sites.
4628 // For more details, see the comment before the definition of
4629 // IPVK_IndirectCallTarget in InstrProfData.inc.
James Y Knight3933add2019-01-30 02:54:28 +00004630 if (!CI->getCalledFunction())
Betul Buyukkurt518276a2016-01-23 22:50:44 +00004631 PGO.valueProfile(Builder, llvm::IPVK_IndirectCallTarget,
John McCallb92ab1a2016-10-26 23:46:34 +00004632 CI, CalleePtr);
Betul Buyukkurt518276a2016-01-23 22:50:44 +00004633
Dan Gohman515a60d2012-02-16 00:57:37 +00004634 // In ObjC ARC mode with no ObjC ARC exception safety, tell the ARC
4635 // optimizer it can aggressively ignore unwind edges.
David Blaikiebbafb8a2012-03-11 07:00:24 +00004636 if (CGM.getLangOpts().ObjCAutoRefCount)
John McCallb92ab1a2016-10-26 23:46:34 +00004637 AddObjCARCExceptionMetadata(CI);
4638
4639 // Suppress tail calls if requested.
4640 if (llvm::CallInst *Call = dyn_cast<llvm::CallInst>(CI)) {
John McCallb92ab1a2016-10-26 23:46:34 +00004641 if (TargetDecl && TargetDecl->hasAttr<NotTailCalledAttr>())
4642 Call->setTailCallKind(llvm::CallInst::TCK_NoTail);
4643 }
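  // For reference, a sketch of what triggers the no-tail marking above
  // (function name made up):
  //
  //   __attribute__((not_tail_called)) int important_frame(void);
  //
  // Calls to important_frame() are emitted as "notail" calls so they keep a
  // distinct stack frame.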
4644
Amy Huang0d0334f2019-04-12 20:25:30 +00004645 // Add heap allocation site metadata for calls to MSAllocator functions.
Amy Huang0d0334f2019-04-12 20:25:30 +00004646 if (getDebugInfo() && TargetDecl &&
4647 TargetDecl->hasAttr<MSAllocatorAttr>())
4648 getDebugInfo()->addHeapAllocSiteMetadata(CI, RetTy, Loc);
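  // The attribute checked above corresponds to MSVC's __declspec(allocator); a
  // hedged example (name invented):
  //
  //   __declspec(allocator) void *my_alloc(size_t n);
  //
  // Calls to my_alloc receive heap-allocation-site debug metadata so debuggers
  // can attribute heap snapshots to the allocating call site.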
4649
John McCallb92ab1a2016-10-26 23:46:34 +00004650 // 4. Finish the call.
Dan Gohman515a60d2012-02-16 00:57:37 +00004651
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004652 // If the call doesn't return, finish the basic block and clear the
John McCallb92ab1a2016-10-26 23:46:34 +00004653 // insertion point; this allows the rest of IRGen to discard
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004654 // unreachable code.
James Y Knight3933add2019-01-30 02:54:28 +00004655 if (CI->doesNotReturn()) {
George Burgess IV003be7c2018-03-08 05:32:30 +00004656 if (UnusedReturnSizePtr)
4657 PopCleanupBlock();
Leny Kholodov6aab1112015-06-08 10:23:49 +00004658
Julian Lettnerb62e9dc2019-01-24 18:04:21 +00004659 // Strip away the noreturn attribute to better diagnose unreachable UB.
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00004660 if (SanOpts.has(SanitizerKind::Unreachable)) {
Julian Lettnerf82d8922019-02-02 02:05:16 +00004661 // Also remove from function since CallBase::hasFnAttr additionally checks
4662 // attributes of the called function.
James Y Knight3933add2019-01-30 02:54:28 +00004663 if (auto *F = CI->getCalledFunction())
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00004664 F->removeFnAttr(llvm::Attribute::NoReturn);
James Y Knight3933add2019-01-30 02:54:28 +00004665 CI->removeAttribute(llvm::AttributeList::FunctionIndex,
4666 llvm::Attribute::NoReturn);
Julian Lettnerb6c06dc2019-02-01 02:51:00 +00004667
4668 // Avoid incompatibility with ASan which relies on the `noreturn`
4669 // attribute to insert handler calls.
Julian Lettner98b9f5b2019-02-04 23:37:50 +00004670 if (SanOpts.hasOneOf(SanitizerKind::Address |
4671 SanitizerKind::KernelAddress)) {
Julian Lettnerb6c06dc2019-02-01 02:51:00 +00004672 SanitizerScope SanScope(this);
4673 llvm::IRBuilder<>::InsertPointGuard IPGuard(Builder);
4674 Builder.SetInsertPoint(CI);
4675 auto *FnType = llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
James Y Knight9871db02019-02-05 16:42:33 +00004676 llvm::FunctionCallee Fn =
4677 CGM.CreateRuntimeFunction(FnType, "__asan_handle_no_return");
Julian Lettnerb6c06dc2019-02-01 02:51:00 +00004678 EmitNounwindRuntimeCall(Fn);
4679 }
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00004680 }
4681
4682 EmitUnreachable(Loc);
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004683 Builder.ClearInsertionPoint();
Mike Stump11289f42009-09-09 15:08:12 +00004684
Mike Stump18bb9282009-05-16 07:57:57 +00004685 // FIXME: For now, emit a dummy basic block because expr emitters in
4686 // generally are not ready to handle emitting expressions at unreachable
4687 // points.
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004688 EnsureInsertPoint();
Mike Stump11289f42009-09-09 15:08:12 +00004689
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004690 // Return a reasonable RValue.
4691 return GetUndefRValue(RetTy);
Mike Stump11289f42009-09-09 15:08:12 +00004692 }
Daniel Dunbarb960b7b2009-03-02 04:32:35 +00004693
John McCall12f23522016-04-04 18:33:08 +00004694 // Perform the swifterror writeback.
4695 if (swiftErrorTemp.isValid()) {
4696 llvm::Value *errorResult = Builder.CreateLoad(swiftErrorTemp);
4697 Builder.CreateStore(errorResult, swiftErrorArg);
4698 }
4699
John McCallb92ab1a2016-10-26 23:46:34 +00004700 // Emit any call-associated writebacks immediately. Arguably this
4701 // should happen after any return-value munging.
John McCall31168b02011-06-15 23:02:42 +00004702 if (CallArgs.hasWritebacks())
4703 emitWritebacks(*this, CallArgs);
4704
Nico Weber8cdb3f92015-08-25 18:43:32 +00004705 // The stack cleanup for inalloca arguments has to run out of the normal
4706 // lexical order, so deactivate it and run it manually here.
4707 CallArgs.freeArgumentMemory(*this);
4708
John McCallb92ab1a2016-10-26 23:46:34 +00004709 // Extract the return value.
Hal Finkelee90a222014-09-26 05:04:30 +00004710 RValue Ret = [&] {
4711 switch (RetAI.getKind()) {
John McCallf26e73d2016-03-11 04:30:43 +00004712 case ABIArgInfo::CoerceAndExpand: {
4713 auto coercionType = RetAI.getCoerceAndExpandType();
John McCallf26e73d2016-03-11 04:30:43 +00004714
4715 Address addr = SRetPtr;
4716 addr = Builder.CreateElementBitCast(addr, coercionType);
4717
John McCall12f23522016-04-04 18:33:08 +00004718 assert(CI->getType() == RetAI.getUnpaddedCoerceAndExpandType());
4719 bool requiresExtract = isa<llvm::StructType>(CI->getType());
4720
John McCallf26e73d2016-03-11 04:30:43 +00004721 unsigned unpaddedIndex = 0;
4722 for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
4723 llvm::Type *eltType = coercionType->getElementType(i);
4724 if (ABIArgInfo::isPaddingForCoerceAndExpand(eltType)) continue;
James Y Knight751fe282019-02-09 22:22:28 +00004725 Address eltAddr = Builder.CreateStructGEP(addr, i);
John McCall12f23522016-04-04 18:33:08 +00004726 llvm::Value *elt = CI;
4727 if (requiresExtract)
4728 elt = Builder.CreateExtractValue(elt, unpaddedIndex++);
4729 else
4730 assert(unpaddedIndex == 0);
John McCallf26e73d2016-03-11 04:30:43 +00004731 Builder.CreateStore(elt, eltAddr);
4732 }
John McCall12f23522016-04-04 18:33:08 +00004733 // FALLTHROUGH
Galina Kistanova0872d6c2017-06-03 06:30:46 +00004734 LLVM_FALLTHROUGH;
John McCall12f23522016-04-04 18:33:08 +00004735 }
4736
4737 case ABIArgInfo::InAlloca:
4738 case ABIArgInfo::Indirect: {
4739 RValue ret = convertTempToRValue(SRetPtr, RetTy, SourceLocation());
George Burgess IV003be7c2018-03-08 05:32:30 +00004740 if (UnusedReturnSizePtr)
4741 PopCleanupBlock();
John McCall12f23522016-04-04 18:33:08 +00004742 return ret;
John McCallf26e73d2016-03-11 04:30:43 +00004743 }
4744
Hal Finkelee90a222014-09-26 05:04:30 +00004745 case ABIArgInfo::Ignore:
4746 // If we are ignoring an argument that had a result, make sure to
4747 // construct the appropriate return value for our caller.
4748 return GetUndefRValue(RetTy);
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004749
Hal Finkelee90a222014-09-26 05:04:30 +00004750 case ABIArgInfo::Extend:
4751 case ABIArgInfo::Direct: {
4752 llvm::Type *RetIRTy = ConvertType(RetTy);
4753 if (RetAI.getCoerceToType() == RetIRTy && RetAI.getDirectOffset() == 0) {
4754 switch (getEvaluationKind(RetTy)) {
4755 case TEK_Complex: {
4756 llvm::Value *Real = Builder.CreateExtractValue(CI, 0);
4757 llvm::Value *Imag = Builder.CreateExtractValue(CI, 1);
4758 return RValue::getComplex(std::make_pair(Real, Imag));
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004759 }
Hal Finkelee90a222014-09-26 05:04:30 +00004760 case TEK_Aggregate: {
John McCall7f416cc2015-09-08 08:05:57 +00004761 Address DestPtr = ReturnValue.getValue();
Hal Finkelee90a222014-09-26 05:04:30 +00004762 bool DestIsVolatile = ReturnValue.isVolatile();
4763
John McCall7f416cc2015-09-08 08:05:57 +00004764 if (!DestPtr.isValid()) {
Hal Finkelee90a222014-09-26 05:04:30 +00004765 DestPtr = CreateMemTemp(RetTy, "agg.tmp");
4766 DestIsVolatile = false;
4767 }
John McCall7f416cc2015-09-08 08:05:57 +00004768 BuildAggStore(*this, CI, DestPtr, DestIsVolatile);
Hal Finkelee90a222014-09-26 05:04:30 +00004769 return RValue::getAggregate(DestPtr);
4770 }
4771 case TEK_Scalar: {
4772 // If the argument doesn't match, perform a bitcast to coerce it. This
4773 // can happen due to trivial type mismatches.
4774 llvm::Value *V = CI;
4775 if (V->getType() != RetIRTy)
4776 V = Builder.CreateBitCast(V, RetIRTy);
4777 return RValue::get(V);
4778 }
4779 }
4780 llvm_unreachable("bad evaluation kind");
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004781 }
Hal Finkelee90a222014-09-26 05:04:30 +00004782
John McCall7f416cc2015-09-08 08:05:57 +00004783 Address DestPtr = ReturnValue.getValue();
Hal Finkelee90a222014-09-26 05:04:30 +00004784 bool DestIsVolatile = ReturnValue.isVolatile();
4785
John McCall7f416cc2015-09-08 08:05:57 +00004786 if (!DestPtr.isValid()) {
Hal Finkelee90a222014-09-26 05:04:30 +00004787 DestPtr = CreateMemTemp(RetTy, "coerce");
4788 DestIsVolatile = false;
John McCall47fb9502013-03-07 21:37:08 +00004789 }
Hal Finkelee90a222014-09-26 05:04:30 +00004790
4791 // If the value is offset in memory, apply the offset now.
John McCall7f416cc2015-09-08 08:05:57 +00004792 Address StorePtr = emitAddressAtOffset(*this, DestPtr, RetAI);
4793 CreateCoercedStore(CI, StorePtr, DestIsVolatile, *this);
Hal Finkelee90a222014-09-26 05:04:30 +00004794
4795 return convertTempToRValue(DestPtr, RetTy, SourceLocation());
Chris Lattnerfe34c1d2010-07-29 06:26:06 +00004796 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004797
Hal Finkelee90a222014-09-26 05:04:30 +00004798 case ABIArgInfo::Expand:
4799 llvm_unreachable("Invalid ABI kind for return argument");
Anders Carlsson17490832009-12-24 20:40:36 +00004800 }
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004801
Hal Finkelee90a222014-09-26 05:04:30 +00004802 llvm_unreachable("Unhandled ABIArgInfo::Kind");
4803 } ();
Michael J. Spencerf5a1fbc2010-10-19 06:39:39 +00004804
John McCallb92ab1a2016-10-26 23:46:34 +00004805 // Emit the assume_aligned check on the return value.
Hal Finkelee90a222014-09-26 05:04:30 +00004806 if (Ret.isScalar() && TargetDecl) {
Roman Lebedeve819f7c2020-01-23 22:50:15 +03004807 AssumeAlignedAttrEmitter.EmitAsAnAssumption(Loc, RetTy, Ret);
Roman Lebedev5ffe6402020-01-23 22:50:24 +03004808 AllocAlignAttrEmitter.EmitAsAnAssumption(Loc, RetTy, Ret);
Daniel Dunbar573884e2008-09-10 07:04:09 +00004809 }
Daniel Dunbard3674e62008-09-11 01:48:57 +00004810
Francis Visoiu Mistrih143f6b82019-10-08 22:10:38 +00004811 // Explicitly call CallLifetimeEnd::Emit just to re-use the code even though
4812 // we can't use the full cleanup mechanism.
4813 for (CallLifetimeEnd &LifetimeEnd : CallLifetimeEndAfterCall)
4814 LifetimeEnd.Emit(*this, /*Flags=*/{});
4815
Akira Hatanakad35a4542019-11-20 18:13:44 -08004816 if (!ReturnValue.isExternallyDestructed() &&
4817 RetTy.isDestructedType() == QualType::DK_nontrivial_c_struct)
4818 pushDestroy(QualType::DK_nontrivial_c_struct, Ret.getAggregateAddress(),
4819 RetTy);
4820
Hal Finkelee90a222014-09-26 05:04:30 +00004821 return Ret;
Daniel Dunbar613855c2008-09-09 23:27:19 +00004822}
Daniel Dunbar2d0746f2009-02-10 20:44:09 +00004823
John McCall9831b842018-02-06 18:52:44 +00004824CGCallee CGCallee::prepareConcreteCallee(CodeGenFunction &CGF) const {
4825 if (isVirtual()) {
4826 const CallExpr *CE = getVirtualCallExpr();
4827 return CGF.CGM.getCXXABI().getVirtualFunctionPointer(
James Y Knightcfe8cd72019-02-07 01:15:41 +00004828 CGF, getVirtualMethodDecl(), getThisAddress(), getVirtualFunctionType(),
Stephen Kellyf2ceec42018-08-09 21:08:08 +00004829 CE ? CE->getBeginLoc() : SourceLocation());
John McCall9831b842018-02-06 18:52:44 +00004830 }
4831
4832 return *this;
4833}
4834
Daniel Dunbar2d0746f2009-02-10 20:44:09 +00004835/* VarArg handling */
4836
Charles Davisc7d5c942015-09-17 20:55:33 +00004837Address CodeGenFunction::EmitVAArg(VAArgExpr *VE, Address &VAListAddr) {
4838 VAListAddr = VE->isMicrosoftABI()
4839 ? EmitMSVAListRef(VE->getSubExpr())
4840 : EmitVAListRef(VE->getSubExpr());
4841 QualType Ty = VE->getType();
4842 if (VE->isMicrosoftABI())
4843 return CGM.getTypes().getABIInfo().EmitMSVAArg(*this, VAListAddr, Ty);
John McCall7f416cc2015-09-08 08:05:57 +00004844 return CGM.getTypes().getABIInfo().EmitVAArg(*this, VAListAddr, Ty);
Daniel Dunbar2d0746f2009-02-10 20:44:09 +00004845}
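// A minimal, hedged illustration of what reaches EmitVAArg: for C source like
//
//   int next_int(__builtin_va_list ap) { return __builtin_va_arg(ap, int); }
//
// the VAArgExpr for __builtin_va_arg is lowered through EmitVAArg, which
// forwards to the target ABIInfo's EmitVAArg (or EmitMSVAArg for the
// Microsoft __builtin_ms_va_list variant).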