//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
using namespace clang;
using namespace CodeGen;

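/// EmitCXXMemberCall - Emit a call to the C++ instance method MD on the
/// object 'This'. 'VTT', if non-null, is the virtual-table-table parameter
/// that base-object constructors and destructors of classes with virtual
/// bases receive (Itanium C++ ABI).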
RValue CodeGenFunction::EmitCXXMemberCall(const CXXMethodDecl *MD,
                                          llvm::Value *Callee,
                                          ReturnValueSlot ReturnValue,
                                          llvm::Value *This,
                                          llvm::Value *VTT,
                                          CallExpr::const_arg_iterator ArgBeg,
                                          CallExpr::const_arg_iterator ArgEnd) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This),
                                MD->getThisType(getContext())));

  // If there is a VTT parameter, emit it.
  if (VTT) {
    QualType T = getContext().getPointerType(getContext().VoidPtrTy);
    Args.push_back(std::make_pair(RValue::get(VTT), T));
  }

  // And the rest of the call args
  EmitCallArgs(Args, FPT, ArgBeg, ArgEnd);

  QualType ResultType = FPT->getResultType();
  return EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args,
                                                 FPT->getCallConv(),
                                                 FPT->getNoReturnAttr()), Callee,
                  ReturnValue, Args, MD);
}

/// canDevirtualizeMemberFunctionCalls - Checks whether virtual calls on the
/// given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCalls(const Expr *Base) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // This is a record decl. We know the type and can devirtualize it.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  if (isa<BinaryOperator>(CE->getCallee()->IgnoreParens()))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(CE->getCallee()->IgnoreParens());
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
    return EmitCall(getContext().getPointerType(MD->getType()), Callee,
                    ReturnValue, CE->arg_begin(), CE->arg_end());
  }

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();

  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());
  llvm::Value *This;

  if (ME->isArrow())
    This = EmitScalarExpr(ME->getBase());
  else {
    LValue BaseLV = EmitLValue(ME->getBase());
    This = BaseLV.getAddress();
  }

  if (MD->isCopyAssignment() && MD->isTrivial()) {
    // We don't like to generate the trivial copy assignment operator when
    // it isn't necessary; just produce the proper effect here.
    llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
    EmitAggregateCopy(This, RHS, CE->getType());
    return RValue::get(This);
  }

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  llvm::Value *Callee;
  if (const CXXDestructorDecl *Destructor =
        dyn_cast<CXXDestructorDecl>(MD)) {
    if (Destructor->isTrivial())
      return RValue::get(0);
    if (MD->isVirtual() && !ME->hasQualifier() &&
        !canDevirtualizeMemberFunctionCalls(ME->getBase())) {
      Callee = BuildVirtualCall(Destructor, Dtor_Complete, This, Ty);
    } else {
      Callee = CGM.GetAddrOfFunction(GlobalDecl(Destructor, Dtor_Complete), Ty);
    }
  } else if (MD->isVirtual() && !ME->hasQualifier() &&
             !canDevirtualizeMemberFunctionCalls(ME->getBase())) {
    Callee = BuildVirtualCall(MD, This, Ty);
  } else {
    Callee = CGM.GetAddrOfFunction(MD, Ty);
  }

  return EmitCXXMemberCall(MD, Callee, ReturnValue, This, /*VTT=*/0,
                           CE->arg_begin(), CE->arg_end());
}

RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
    cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const MemberPointerType *MPT =
    MemFnExpr->getType()->getAs<MemberPointerType>();
  const FunctionProtoType *FPT =
    MPT->getPointeeType()->getAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
    cast<CXXRecordDecl>(MPT->getClass()->getAs<RecordType>()->getDecl());

  const llvm::FunctionType *FTy =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(RD, FPT),
                                   FPT->isVariadic());

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(VMContext);

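  // Note: this lowering assumes the Itanium C++ ABI representation of member
  // function pointers as a { ptr, adj } pair: 'adj' is the adjustment to
  // apply to 'this', and 'ptr' holds either the function's address or, for
  // virtual functions, the vtable offset plus 1 (the low bit serves as the
  // "is virtual" flag tested below).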
  // Get the member function pointer.
  llvm::Value *MemFnPtr = CreateMemTemp(MemFnExpr->getType(), "mem.fn");
  EmitAggExpr(MemFnExpr, MemFnPtr, /*VolatileDest=*/false);

  // Emit the 'this' pointer.
  llvm::Value *This;

  if (BO->getOpcode() == BinaryOperator::PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  // Adjust it.
  llvm::Value *Adj = Builder.CreateStructGEP(MemFnPtr, 1);
  Adj = Builder.CreateLoad(Adj, "mem.fn.adj");

  llvm::Value *Ptr = Builder.CreateBitCast(This, Int8PtrTy, "ptr");
  Ptr = Builder.CreateGEP(Ptr, Adj, "adj");

  This = Builder.CreateBitCast(Ptr, This->getType(), "this");

  llvm::Value *FnPtr = Builder.CreateStructGEP(MemFnPtr, 0, "mem.fn.ptr");

  const llvm::Type *PtrDiffTy = ConvertType(getContext().getPointerDiffType());

  llvm::Value *FnAsInt = Builder.CreateLoad(FnPtr, "fn");

  // If the LSB in the function pointer is 1, the function pointer points to
  // a virtual function.
  llvm::Value *IsVirtual
    = Builder.CreateAnd(FnAsInt, llvm::ConstantInt::get(PtrDiffTy, 1),
                        "and");

  IsVirtual = Builder.CreateTrunc(IsVirtual,
                                  llvm::Type::getInt1Ty(VMContext));

  llvm::BasicBlock *FnVirtual = createBasicBlock("fn.virtual");
  llvm::BasicBlock *FnNonVirtual = createBasicBlock("fn.nonvirtual");
  llvm::BasicBlock *FnEnd = createBasicBlock("fn.end");

  Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);
  EmitBlock(FnVirtual);

  const llvm::Type *VtableTy =
    FTy->getPointerTo()->getPointerTo();

  llvm::Value *Vtable = Builder.CreateBitCast(This, VtableTy->getPointerTo());
  Vtable = Builder.CreateLoad(Vtable);

  Vtable = Builder.CreateBitCast(Vtable, Int8PtrTy);
  llvm::Value *VtableOffset =
    Builder.CreateSub(FnAsInt, llvm::ConstantInt::get(PtrDiffTy, 1));

  Vtable = Builder.CreateGEP(Vtable, VtableOffset, "fn");
  Vtable = Builder.CreateBitCast(Vtable, VtableTy);

  llvm::Value *VirtualFn = Builder.CreateLoad(Vtable, "virtualfn");

  EmitBranch(FnEnd);
  EmitBlock(FnNonVirtual);

  // If the function is not virtual, just load the pointer.
  llvm::Value *NonVirtualFn = Builder.CreateLoad(FnPtr, "fn");
  NonVirtualFn = Builder.CreateIntToPtr(NonVirtualFn, FTy->getPointerTo());

  EmitBlock(FnEnd);

  llvm::PHINode *Callee = Builder.CreatePHI(FTy->getPointerTo());
  Callee->reserveOperandSpace(2);
  Callee->addIncoming(VirtualFn, FnVirtual);
  Callee->addIncoming(NonVirtualFn, FnNonVirtual);

  CallArgList Args;

  QualType ThisType =
    getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This), ThisType));

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end());
  const FunctionType *BO_FPT = BO->getType()->getAs<FunctionProtoType>();
  return EmitCall(CGM.getTypes().getFunctionInfo(Args, BO_FPT), Callee,
                  ReturnValue, Args);
}

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  if (MD->isCopyAssignment()) {
    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(MD->getDeclContext());
    if (ClassDecl->hasTrivialCopyAssignment()) {
      assert(!ClassDecl->hasUserDeclaredCopyAssignment() &&
             "EmitCXXOperatorMemberCallExpr - user declared copy assignment");
      llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();
      llvm::Value *Src = EmitLValue(E->getArg(1)).getAddress();
      QualType Ty = E->getType();
      EmitAggregateCopy(This, Src, Ty);
      return RValue::get(This);
    }
  }

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());

  llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();

  llvm::Value *Callee;
  if (MD->isVirtual() && !canDevirtualizeMemberFunctionCalls(E->getArg(0)))
    Callee = BuildVirtualCall(MD, This, Ty);
  else
    Callee = CGM.GetAddrOfFunction(MD, Ty);

  return EmitCXXMemberCall(MD, Callee, ReturnValue, This, /*VTT=*/0,
                           E->arg_begin() + 1, E->arg_end());
}

void
CodeGenFunction::EmitCXXConstructExpr(llvm::Value *Dest,
                                      const CXXConstructExpr *E) {
  assert(Dest && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();
  const ConstantArrayType *Array =
    getContext().getAsConstantArrayType(E->getType());
  // For a copy constructor, even if it is trivial, we must fall through so
  // that its argument is code-gen'ed.
  if (!CD->isCopyConstructor()) {
    QualType InitType = E->getType();
    if (Array)
      InitType = getContext().getBaseElementType(Array);
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(InitType->getAs<RecordType>()->getDecl());
    if (RD->hasTrivialConstructor())
      return;
  }
  // Code-gen optimization: elide the copy constructor and emit its first
  // argument directly into the destination instead.
  if (getContext().getLangOptions().ElideConstructors && E->isElidable()) {
    const Expr *Arg = E->getArg(0);

    if (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg)) {
      assert((ICE->getCastKind() == CastExpr::CK_NoOp ||
              ICE->getCastKind() == CastExpr::CK_ConstructorConversion ||
              ICE->getCastKind() == CastExpr::CK_UserDefinedConversion) &&
             "Unknown implicit cast kind in constructor elision");
      Arg = ICE->getSubExpr();
    }

    if (const CXXFunctionalCastExpr *FCE = dyn_cast<CXXFunctionalCastExpr>(Arg))
      Arg = FCE->getSubExpr();

    if (const CXXBindTemporaryExpr *BindExpr =
          dyn_cast<CXXBindTemporaryExpr>(Arg))
      Arg = BindExpr->getSubExpr();

    EmitAggExpr(Arg, Dest, false);
    return;
  }
  if (Array) {
    QualType BaseElementTy = getContext().getBaseElementType(Array);
    const llvm::Type *BasePtr = ConvertType(BaseElementTy);
    BasePtr = llvm::PointerType::getUnqual(BasePtr);
    llvm::Value *BaseAddrPtr =
      Builder.CreateBitCast(Dest, BasePtr);

    EmitCXXAggrConstructorCall(CD, Array, BaseAddrPtr,
                               E->arg_begin(), E->arg_end());
  }
  else
    // Call the constructor.
    EmitCXXConstructorCall(CD,
                           E->isBaseInitialization()? Ctor_Base : Ctor_Complete,
                           Dest,
                           E->arg_begin(), E->arg_end());
}

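// Array "cookies": when the element type has a non-trivial destructor, or its
// usual operator delete[] takes a size_t, new[] must remember how many
// elements it allocated so that delete[] can destroy them and compute the
// deallocation size. The helpers below compute how much padding that cookie
// requires, if any.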
static CharUnits CalculateCookiePadding(ASTContext &Ctx, QualType ElementType) {
  const RecordType *RT = ElementType->getAs<RecordType>();
  if (!RT)
    return CharUnits::Zero();

  const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl());
  if (!RD)
    return CharUnits::Zero();

  // Check if the class has a trivial destructor.
  if (RD->hasTrivialDestructor()) {
    // Check if the usual deallocation function takes two arguments.
    const CXXMethodDecl *UsualDeallocationFunction = 0;

    DeclarationName OpName =
      Ctx.DeclarationNames.getCXXOperatorName(OO_Array_Delete);
    DeclContext::lookup_const_iterator Op, OpEnd;
    for (llvm::tie(Op, OpEnd) = RD->lookup(OpName);
         Op != OpEnd; ++Op) {
      const CXXMethodDecl *Delete = cast<CXXMethodDecl>(*Op);

      if (Delete->isUsualDeallocationFunction()) {
        UsualDeallocationFunction = Delete;
        break;
      }
    }

    // If there is no usual deallocation function, we don't need a cookie.
    if (!UsualDeallocationFunction)
      return CharUnits::Zero();

    // The usual deallocation function doesn't take a size_t argument, so we
    // don't need a cookie.
    if (UsualDeallocationFunction->getNumParams() == 1)
      return CharUnits::Zero();

    assert(UsualDeallocationFunction->getNumParams() == 2 &&
           "Unexpected deallocation function type!");
  }

  // Padding is the maximum of sizeof(size_t) and alignof(ElementType).
  return std::max(Ctx.getTypeSizeInChars(Ctx.getSizeType()),
                  Ctx.getTypeAlignInChars(ElementType));
}

static CharUnits CalculateCookiePadding(ASTContext &Ctx, const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the new operator being used is
  // ::operator new[](size_t, void*).
  const FunctionDecl *OperatorNew = E->getOperatorNew();
  if (OperatorNew->getDeclContext()->getLookupContext()->isFileContext()) {
    if (OperatorNew->getNumParams() == 2) {
      CanQualType ParamType =
        Ctx.getCanonicalType(OperatorNew->getParamDecl(1)->getType());

      if (ParamType == Ctx.VoidPtrTy)
        return CharUnits::Zero();
    }
  }

  return CalculateCookiePadding(Ctx, E->getAllocatedType());
}

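/// EmitCXXNewAllocSize - Compute the number of bytes to request from the
/// allocation function: sizeof(T) for a scalar new-expression, or
/// NumElements * sizeof(T) plus any cookie padding for an array
/// new-expression. The element count is also returned through NumElements.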
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *E,
                                        llvm::Value *&NumElements) {
  QualType Type = E->getAllocatedType();
  CharUnits TypeSize = CGF.getContext().getTypeSizeInChars(Type);
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());

  if (!E->isArray())
    return llvm::ConstantInt::get(SizeTy, TypeSize.getQuantity());

  CharUnits CookiePadding = CalculateCookiePadding(CGF.getContext(), E);

  Expr::EvalResult Result;
  if (E->getArraySize()->Evaluate(Result, CGF.getContext()) &&
      !Result.HasSideEffects && Result.Val.isInt()) {

    CharUnits AllocSize =
      Result.Val.getInt().getZExtValue() * TypeSize + CookiePadding;

    NumElements =
      llvm::ConstantInt::get(SizeTy, Result.Val.getInt().getZExtValue());

    return llvm::ConstantInt::get(SizeTy, AllocSize.getQuantity());
  }

  // Emit the array size expression.
  NumElements = CGF.EmitScalarExpr(E->getArraySize());

  // Multiply with the type size.
  llvm::Value *V =
    CGF.Builder.CreateMul(NumElements,
                          llvm::ConstantInt::get(SizeTy,
                                                 TypeSize.getQuantity()));

  // And add the cookie padding if necessary.
  if (!CookiePadding.isZero())
    V = CGF.Builder.CreateAdd(V,
        llvm::ConstantInt::get(SizeTy, CookiePadding.getQuantity()));

  return V;
}

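/// EmitNewInitializer - Emit the initialization of newly allocated storage:
/// call the constructor for class types (element by element for array new),
/// or store the single initializer value for POD types.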
static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               llvm::Value *NewPtr,
                               llvm::Value *NumElements) {
  if (E->isArray()) {
    if (CXXConstructorDecl *Ctor = E->getConstructor())
      CGF.EmitCXXAggrConstructorCall(Ctor, NumElements, NewPtr,
                                     E->constructor_arg_begin(),
                                     E->constructor_arg_end());
    return;
  }

  QualType AllocType = E->getAllocatedType();

  if (CXXConstructorDecl *Ctor = E->getConstructor()) {
    CGF.EmitCXXConstructorCall(Ctor, Ctor_Complete, NewPtr,
                               E->constructor_arg_begin(),
                               E->constructor_arg_end());

    return;
  }

  // We have a POD type.
  if (E->getNumConstructorArgs() == 0)
    return;

  assert(E->getNumConstructorArgs() == 1 &&
         "Can only have one argument to initializer of POD type.");

  const Expr *Init = E->getConstructorArg(0);

  if (!CGF.hasAggregateLLVMType(AllocType))
    CGF.EmitStoreOfScalar(CGF.EmitScalarExpr(Init), NewPtr,
                          AllocType.isVolatileQualified(), AllocType);
  else if (AllocType->isAnyComplexType())
    CGF.EmitComplexExprIntoAddr(Init, NewPtr,
                                AllocType.isVolatileQualified());
  else
    CGF.EmitAggExpr(Init, NewPtr, AllocType.isVolatileQualified());
}

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  QualType AllocType = E->getAllocatedType();
  FunctionDecl *NewFD = E->getOperatorNew();
  const FunctionProtoType *NewFTy = NewFD->getType()->getAs<FunctionProtoType>();

  CallArgList NewArgs;

  // The allocation size is the first argument.
  QualType SizeTy = getContext().getSizeType();

  llvm::Value *NumElements = 0;
  llvm::Value *AllocSize = EmitCXXNewAllocSize(*this, E, NumElements);

  NewArgs.push_back(std::make_pair(RValue::get(AllocSize), SizeTy));

  // Emit the rest of the arguments.
  // FIXME: Ideally, this should just use EmitCallArgs.
  CXXNewExpr::const_arg_iterator NewArg = E->placement_arg_begin();

  // First, use the types from the function type.
  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  for (unsigned i = 1, e = NewFTy->getNumArgs(); i != e; ++i, ++NewArg) {
    QualType ArgType = NewFTy->getArgType(i);

    assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
           getTypePtr() ==
           getContext().getCanonicalType(NewArg->getType()).getTypePtr() &&
           "type mismatch in call argument!");

    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((NewArg == E->placement_arg_end() || NewFTy->isVariadic()) &&
         "Extra arguments in non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (CXXNewExpr::const_arg_iterator NewArgEnd = E->placement_arg_end();
       NewArg != NewArgEnd; ++NewArg) {
    QualType ArgType = NewArg->getType();
    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Emit the call to new.
  RValue RV =
    EmitCall(CGM.getTypes().getFunctionInfo(NewArgs, NewFTy),
             CGM.GetAddrOfFunction(NewFD), ReturnValueSlot(), NewArgs, NewFD);

  // If an allocation function is declared with an empty exception specification
  // it returns null to indicate failure to allocate storage. [expr.new]p13.
  // (We don't need to check for null when there's no new initializer and
  // we're allocating a POD type).
  bool NullCheckResult = NewFTy->hasEmptyExceptionSpec() &&
    !(AllocType->isPODType() && !E->hasInitializer());

  llvm::BasicBlock *NewNull = 0;
  llvm::BasicBlock *NewNotNull = 0;
  llvm::BasicBlock *NewEnd = 0;

  llvm::Value *NewPtr = RV.getScalarVal();

  if (NullCheckResult) {
    NewNull = createBasicBlock("new.null");
    NewNotNull = createBasicBlock("new.notnull");
    NewEnd = createBasicBlock("new.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(NewPtr,
                           llvm::Constant::getNullValue(NewPtr->getType()),
                           "isnull");

    Builder.CreateCondBr(IsNull, NewNull, NewNotNull);
    EmitBlock(NewNotNull);
  }

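  // If this allocation requires a cookie, store the element count just before
  // the first element and advance NewPtr past the cookie padding so it points
  // at the array elements themselves.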
  CharUnits CookiePadding = CalculateCookiePadding(getContext(), E);
  if (!CookiePadding.isZero()) {
    CharUnits CookieOffset =
      CookiePadding - getContext().getTypeSizeInChars(SizeTy);

    llvm::Value *NumElementsPtr =
      Builder.CreateConstInBoundsGEP1_64(NewPtr, CookieOffset.getQuantity());

    NumElementsPtr = Builder.CreateBitCast(NumElementsPtr,
                                           ConvertType(SizeTy)->getPointerTo());
    Builder.CreateStore(NumElements, NumElementsPtr);

    // Now add the padding to the new ptr.
    NewPtr = Builder.CreateConstInBoundsGEP1_64(NewPtr,
                                                CookiePadding.getQuantity());
  }

  NewPtr = Builder.CreateBitCast(NewPtr, ConvertType(E->getType()));

  EmitNewInitializer(*this, E, NewPtr, NumElements);

  if (NullCheckResult) {
    Builder.CreateBr(NewEnd);
    NewNotNull = Builder.GetInsertBlock();
    EmitBlock(NewNull);
    Builder.CreateBr(NewEnd);
    EmitBlock(NewEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(NewPtr->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(NewPtr, NewNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(NewPtr->getType()), NewNull);

    NewPtr = PHI;
  }

  return NewPtr;
}

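/// GetAllocatedObjectPtrAndNumElements - Given a pointer to the first element
/// of an array allocated with a cookie, step back over the cookie padding to
/// recover the pointer originally returned by operator new[] and load the
/// element count that was stored in the cookie.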
static std::pair<llvm::Value *, llvm::Value *>
GetAllocatedObjectPtrAndNumElements(CodeGenFunction &CGF,
                                    llvm::Value *Ptr, QualType DeleteTy) {
  QualType SizeTy = CGF.getContext().getSizeType();
  const llvm::Type *SizeLTy = CGF.ConvertType(SizeTy);

  CharUnits DeleteTypeAlign = CGF.getContext().getTypeAlignInChars(DeleteTy);
  CharUnits CookiePadding =
    std::max(CGF.getContext().getTypeSizeInChars(SizeTy),
             DeleteTypeAlign);
  assert(!CookiePadding.isZero() && "CookiePadding should not be 0.");

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  CharUnits CookieOffset =
    CookiePadding - CGF.getContext().getTypeSizeInChars(SizeTy);

  llvm::Value *AllocatedObjectPtr = CGF.Builder.CreateBitCast(Ptr, Int8PtrTy);
  AllocatedObjectPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           -CookiePadding.getQuantity());

  llvm::Value *NumElementsPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           CookieOffset.getQuantity());
  NumElementsPtr =
    CGF.Builder.CreateBitCast(NumElementsPtr, SizeLTy->getPointerTo());

  llvm::Value *NumElements = CGF.Builder.CreateLoad(NumElementsPtr);
  NumElements =
    CGF.Builder.CreateIntCast(NumElements, SizeLTy, /*isSigned=*/false);

  return std::make_pair(AllocatedObjectPtr, NumElements);
}

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = 0;
  QualType SizeTy;
  if (DeleteFTy->getNumArgs() == 2) {
    SizeTy = DeleteFTy->getArgType(1);
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    Size = llvm::ConstantInt::get(ConvertType(SizeTy),
                                  DeleteTypeSize.getQuantity());
  }

  if (DeleteFD->getOverloadedOperator() == OO_Array_Delete &&
      !CalculateCookiePadding(getContext(), DeleteTy).isZero()) {
    // We need to get the number of elements in the array from the cookie.
    llvm::Value *AllocatedObjectPtr;
    llvm::Value *NumElements;
    llvm::tie(AllocatedObjectPtr, NumElements) =
      GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

    // Multiply the size with the number of elements.
    if (Size)
      Size = Builder.CreateMul(NumElements, Size);

    Ptr = AllocatedObjectPtr;
  }

  QualType ArgTy = DeleteFTy->getArgType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.push_back(std::make_pair(RValue::get(DeletePtr), ArgTy));

  if (Size)
    DeleteArgs.push_back(std::make_pair(RValue::get(Size), SizeTy));

  // Emit the call to delete.
  EmitCall(CGM.getTypes().getFunctionInfo(DeleteArgs, DeleteFTy),
           CGM.GetAddrOfFunction(DeleteFD), ReturnValueSlot(),
           DeleteArgs, DeleteFD);
}

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {

  // Get at the argument before we performed the implicit conversion
  // to void*.
  const Expr *Arg = E->getArgument();
  while (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg)) {
    if (ICE->getCastKind() != CastExpr::CK_UserDefinedConversion &&
        ICE->getType()->isVoidPointerType())
      Arg = ICE->getSubExpr();
    else
      break;
  }

  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();

  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull =
    Builder.CreateICmpEQ(Ptr, llvm::Constant::getNullValue(Ptr->getType()),
                         "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  bool ShouldCallDelete = true;

  // Call the destructor if necessary.
  if (const RecordType *RT = DeleteTy->getAs<RecordType>()) {
    if (CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
      if (!RD->hasTrivialDestructor()) {
        const CXXDestructorDecl *Dtor = RD->getDestructor(getContext());
        if (E->isArrayForm()) {
          llvm::Value *AllocatedObjectPtr;
          llvm::Value *NumElements;
          llvm::tie(AllocatedObjectPtr, NumElements) =
            GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

          EmitCXXAggrDestructorCall(Dtor, NumElements, Ptr);
        } else if (Dtor->isVirtual()) {
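          // For a virtual destructor, dispatch through the vtable to the
          // deleting variant (Dtor_Deleting), which destroys the object and
          // then frees its storage, so operator delete must not be called
          // again below.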
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(Dtor),
                                           /*isVariadic=*/false);

          llvm::Value *Callee = BuildVirtualCall(Dtor, Dtor_Deleting, Ptr, Ty);
          EmitCXXMemberCall(Dtor, Callee, ReturnValueSlot(), Ptr, /*VTT=*/0,
                            0, 0);

          // The dtor took care of deleting the object.
          ShouldCallDelete = false;
        } else
          EmitCXXDestructorCall(Dtor, Dtor_Complete, Ptr);
      }
    }
  }

  if (ShouldCallDelete)
    EmitDeleteCall(E->getOperatorDelete(), Ptr, DeleteTy);

  EmitBlock(DeleteEnd);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  QualType Ty = E->getType();
  const llvm::Type *LTy = ConvertType(Ty)->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
      CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand());
    return Builder.CreateBitCast(TypeInfo, LTy);
  }

  Expr *subE = E->getExprOperand();
  Ty = subE->getType();
  CanQualType CanTy = CGM.getContext().getCanonicalType(Ty);
  Ty = CanTy.getUnqualifiedType().getNonReferenceType();
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->isPolymorphic()) {
      // FIXME: if subE is an lvalue do
      LValue Obj = EmitLValue(subE);
      llvm::Value *This = Obj.getAddress();
      LTy = LTy->getPointerTo()->getPointerTo();
      llvm::Value *V = Builder.CreateBitCast(This, LTy);
      // We need to do a zero check for *p, unless it has NonNullAttr.
      // FIXME: PointerType->hasAttr<NonNullAttr>()
      bool CanBeZero = false;
      if (UnaryOperator *UO = dyn_cast<UnaryOperator>(subE->IgnoreParens()))
        if (UO->getOpcode() == UnaryOperator::Deref)
          CanBeZero = true;
      if (CanBeZero) {
        llvm::BasicBlock *NonZeroBlock = createBasicBlock();
        llvm::BasicBlock *ZeroBlock = createBasicBlock();

        llvm::Value *Zero = llvm::Constant::getNullValue(LTy);
        Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                             NonZeroBlock, ZeroBlock);
        EmitBlock(ZeroBlock);
        /// Call __cxa_bad_typeid
        const llvm::Type *ResultType = llvm::Type::getVoidTy(VMContext);
        const llvm::FunctionType *FTy;
        FTy = llvm::FunctionType::get(ResultType, false);
        llvm::Value *F = CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
        Builder.CreateCall(F)->setDoesNotReturn();
        Builder.CreateUnreachable();
        EmitBlock(NonZeroBlock);
      }
      V = Builder.CreateLoad(V, "vtable");
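      // In the Itanium C++ ABI, the std::type_info pointer for the dynamic
      // type is stored in the vtable slot immediately before the address
      // point, hence the index of -1 here.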
      V = Builder.CreateConstInBoundsGEP1_64(V, -1ULL);
      V = Builder.CreateLoad(V);
      return V;
    }
  }
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(Ty), LTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *V,
                                              const CXXDynamicCastExpr *DCE) {
  QualType SrcTy = DCE->getSubExpr()->getType();
  QualType DestTy = DCE->getTypeAsWritten();
  QualType InnerType = DestTy->getPointeeType();

  const llvm::Type *LTy = ConvertType(DCE->getType());

  bool CanBeZero = false;
  bool ToVoid = false;
  bool ThrowOnBad = false;
  if (DestTy->isPointerType()) {
    // FIXME: if PointerType->hasAttr<NonNullAttr>(), we don't set this
    CanBeZero = true;
    if (InnerType->isVoidType())
      ToVoid = true;
  } else {
    LTy = LTy->getPointerTo();
    ThrowOnBad = true;
  }

  if (SrcTy->isPointerType() || SrcTy->isReferenceType())
    SrcTy = SrcTy->getPointeeType();
  SrcTy = SrcTy.getUnqualifiedType();

  if (DestTy->isPointerType() || DestTy->isReferenceType())
    DestTy = DestTy->getPointeeType();
  DestTy = DestTy.getUnqualifiedType();

  llvm::BasicBlock *ContBlock = createBasicBlock();
  llvm::BasicBlock *NullBlock = 0;
  llvm::BasicBlock *NonZeroBlock = 0;
  if (CanBeZero) {
    NonZeroBlock = createBasicBlock();
    NullBlock = createBasicBlock();
    Builder.CreateCondBr(Builder.CreateIsNotNull(V), NonZeroBlock, NullBlock);
    EmitBlock(NonZeroBlock);
  }

  llvm::BasicBlock *BadCastBlock = 0;

  const llvm::Type *PtrDiffTy = ConvertType(getContext().getPointerDiffType());

  // See if this is a dynamic_cast<void*>.
  if (ToVoid) {
    llvm::Value *This = V;
    V = Builder.CreateBitCast(This, PtrDiffTy->getPointerTo()->getPointerTo());
    V = Builder.CreateLoad(V, "vtable");
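    // dynamic_cast to void* yields a pointer to the most derived object:
    // load the 'offset to top' field, which the Itanium ABI keeps two slots
    // before the vtable's address point, and add it to the object pointer.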
    V = Builder.CreateConstInBoundsGEP1_64(V, -2ULL);
    V = Builder.CreateLoad(V, "offset to top");
    This = Builder.CreateBitCast(This, llvm::Type::getInt8PtrTy(VMContext));
    V = Builder.CreateInBoundsGEP(This, V);
    V = Builder.CreateBitCast(V, LTy);
  } else {
    /// Call __dynamic_cast
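    // Assumed prototype, from the Itanium C++ ABI runtime:
    //   void *__dynamic_cast(const void *sub, const __class_type_info *src,
    //                        const __class_type_info *dst,
    //                        ptrdiff_t src2dst_offset);
    // It returns the adjusted pointer on success and null on failure.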
    const llvm::Type *ResultType = llvm::Type::getInt8PtrTy(VMContext);
    const llvm::FunctionType *FTy;
    std::vector<const llvm::Type*> ArgTys;
    const llvm::Type *PtrToInt8Ty
      = llvm::Type::getInt8Ty(VMContext)->getPointerTo();
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrDiffTy);
    FTy = llvm::FunctionType::get(ResultType, ArgTys, false);

    // FIXME: Calculate better hint.
    llvm::Value *hint = llvm::ConstantInt::get(PtrDiffTy, -1ULL);

    assert(SrcTy->isRecordType() && "Src type must be record type!");
    assert(DestTy->isRecordType() && "Dest type must be record type!");

    llvm::Value *SrcArg
      = CGM.GetAddrOfRTTIDescriptor(SrcTy.getUnqualifiedType());
    llvm::Value *DestArg
      = CGM.GetAddrOfRTTIDescriptor(DestTy.getUnqualifiedType());

    V = Builder.CreateBitCast(V, PtrToInt8Ty);
    V = Builder.CreateCall4(CGM.CreateRuntimeFunction(FTy, "__dynamic_cast"),
                            V, SrcArg, DestArg, hint);
    V = Builder.CreateBitCast(V, LTy);

    if (ThrowOnBad) {
      BadCastBlock = createBasicBlock();

      Builder.CreateCondBr(Builder.CreateIsNotNull(V), ContBlock, BadCastBlock);
      EmitBlock(BadCastBlock);
      /// Call __cxa_bad_cast
      ResultType = llvm::Type::getVoidTy(VMContext);
      const llvm::FunctionType *FBadTy;
      FBadTy = llvm::FunctionType::get(ResultType, false);
      llvm::Value *F = CGM.CreateRuntimeFunction(FBadTy, "__cxa_bad_cast");
      Builder.CreateCall(F)->setDoesNotReturn();
      Builder.CreateUnreachable();
    }
  }

  if (CanBeZero) {
    Builder.CreateBr(ContBlock);
    EmitBlock(NullBlock);
    Builder.CreateBr(ContBlock);
  }
  EmitBlock(ContBlock);
  if (CanBeZero) {
    llvm::PHINode *PHI = Builder.CreatePHI(LTy);
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(V, NonZeroBlock);
    PHI->addIncoming(llvm::Constant::getNullValue(LTy), NullBlock);
    V = PHI;
  }

  return V;
}