//===--- CodeGenFunction.cpp - Emit LLVM Code from ASTs for a Function ----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This coordinates the per-function state used while generating code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGDebugInfo.h"
#include "CGException.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/AST/APValue.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

CodeGenFunction::CodeGenFunction(CodeGenModule &cgm)
  : BlockFunction(cgm, *this, Builder), CGM(cgm),
    Target(CGM.getContext().Target),
    Builder(cgm.getModule().getContext()),
    ExceptionSlot(0), DebugInfo(0), IndirectBranch(0),
    SwitchInsn(0), CaseRangeBlock(0), InvokeDest(0),
    DidCallStackSave(false), UnreachableBlock(0),
    CXXThisDecl(0), CXXThisValue(0), CXXVTTDecl(0), CXXVTTValue(0),
    ConditionalBranchLevel(0), TerminateLandingPad(0), TerminateHandler(0),
    TrapBB(0), ThrowLengthErrorBB(0) {

  // Get some frequently used types.
  LLVMPointerWidth = Target.getPointerWidth(0);
  llvm::LLVMContext &LLVMContext = CGM.getLLVMContext();
  IntPtrTy = llvm::IntegerType::get(LLVMContext, LLVMPointerWidth);
  Int32Ty = llvm::Type::getInt32Ty(LLVMContext);
  Int64Ty = llvm::Type::getInt64Ty(LLVMContext);

  Exceptions = getContext().getLangOptions().Exceptions;
  CatchUndefined = getContext().getLangOptions().CatchUndefined;
  CGM.getMangleContext().startNewFunction();
}

ASTContext &CodeGenFunction::getContext() const {
  return CGM.getContext();
}


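/// GetAddrOfLocalVar - Return the address of a local variable that has
/// already been emitted into LocalDeclMap.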
llvm::Value *CodeGenFunction::GetAddrOfLocalVar(const VarDecl *VD) {
  llvm::Value *Res = LocalDeclMap[VD];
  assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
  return Res;
}

llvm::Constant *
CodeGenFunction::GetAddrOfStaticLocalVar(const VarDecl *BVD) {
  return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
}

const llvm::Type *CodeGenFunction::ConvertTypeForMem(QualType T) {
  return CGM.getTypes().ConvertTypeForMem(T);
}

const llvm::Type *CodeGenFunction::ConvertType(QualType T) {
  return CGM.getTypes().ConvertType(T);
}

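/// hasAggregateLLVMType - Return true if the specified AST type is not
/// represented as a simple scalar value in codegen: records, arrays,
/// complex types, and member function pointers.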
bool CodeGenFunction::hasAggregateLLVMType(QualType T) {
  return T->isRecordType() || T->isArrayType() || T->isAnyComplexType() ||
    T->isMemberFunctionPointerType();
}

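/// EmitReturnBlock - Emit the unified return block, trying to fold it into
/// the current insertion point or its unique predecessor when possible.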
void CodeGenFunction::EmitReturnBlock() {
  // For cleanliness, we try to avoid emitting the return block for
  // simple cases.
  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();

  if (CurBB) {
    assert(!CurBB->getTerminator() && "Unexpected terminated block.");

    // We have a valid insert point, reuse it if it is empty or there are no
    // explicit jumps to the return block.
    if (CurBB->empty() || ReturnBlock.Block->use_empty()) {
      ReturnBlock.Block->replaceAllUsesWith(CurBB);
      delete ReturnBlock.Block;
    } else
      EmitBlock(ReturnBlock.Block);
    return;
  }

  // Otherwise, if the return block is the target of a single direct
  // branch then we can just put the code in that block instead. This
  // cleans up functions which started with a unified return block.
  if (ReturnBlock.Block->hasOneUse()) {
    llvm::BranchInst *BI =
      dyn_cast<llvm::BranchInst>(*ReturnBlock.Block->use_begin());
    if (BI && BI->isUnconditional() &&
        BI->getSuccessor(0) == ReturnBlock.Block) {
      // Reset insertion point and delete the branch.
      Builder.SetInsertPoint(BI->getParent());
      BI->eraseFromParent();
      delete ReturnBlock.Block;
      return;
    }
  }

  // FIXME: We are at an unreachable point, there is no reason to emit the block
  // unless it has uses. However, we still need a place to put the debug
  // region.end for now.

  EmitBlock(ReturnBlock.Block);
}

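/// EmitIfUsed - Append the given block to the function if it has uses;
/// otherwise delete it.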
static void EmitIfUsed(CodeGenFunction &CGF, llvm::BasicBlock *BB) {
  if (!BB) return;
  if (!BB->use_empty())
    return CGF.CurFn->getBasicBlockList().push_back(BB);
  delete BB;
}

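/// FinishFunction - Complete IR generation for the current function body:
/// emit the return block, the function epilogue, and any deferred blocks,
/// and verify that the cleanup stack has been fully popped.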
void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
  assert(BreakContinueStack.empty() &&
         "mismatched push/pop in break/continue stack!");

  // Emit function epilog (to return).
  EmitReturnBlock();

  EmitFunctionInstrumentation("__cyg_profile_func_exit");

  // Emit debug descriptor for function end.
  if (CGDebugInfo *DI = getDebugInfo()) {
    DI->setLocation(EndLoc);
    DI->EmitRegionEnd(CurFn, Builder);
  }

  EmitFunctionEpilog(*CurFnInfo);
  EmitEndEHSpec(CurCodeDecl);

  assert(EHStack.empty() &&
         "did not remove all scopes from cleanup stack!");

  // If someone did an indirect goto, emit the indirect goto block at the end of
  // the function.
  if (IndirectBranch) {
    EmitBlock(IndirectBranch->getParent());
    Builder.ClearInsertionPoint();
  }

  // If someone called operator new[] and needs a throw_length_error block, emit
  // it at the end of the function.
  if (ThrowLengthErrorBB) {
    EmitBlock(ThrowLengthErrorBB);
    Builder.ClearInsertionPoint();
  }

  // Remove the AllocaInsertPt instruction, which is just a convenience for us.
  llvm::Instruction *Ptr = AllocaInsertPt;
  AllocaInsertPt = 0;
  Ptr->eraseFromParent();

  // If someone took the address of a label but never did an indirect goto, we
  // made a zero entry PHI node, which is illegal, zap it now.
  if (IndirectBranch) {
    llvm::PHINode *PN = cast<llvm::PHINode>(IndirectBranch->getAddress());
    if (PN->getNumIncomingValues() == 0) {
      PN->replaceAllUsesWith(llvm::UndefValue::get(PN->getType()));
      PN->eraseFromParent();
    }
  }

  EmitIfUsed(*this, TerminateLandingPad);
  EmitIfUsed(*this, TerminateHandler);
  EmitIfUsed(*this, UnreachableBlock);

  if (CGM.getCodeGenOpts().EmitDeclMetadata)
    EmitDeclMetadata();
}

/// getThrowLengthErrorBB - Create a basic block that will call
/// std::__throw_length_error to throw a std::length_error exception.
llvm::BasicBlock *CodeGenFunction::getThrowLengthErrorBB() {
  if (ThrowLengthErrorBB) return ThrowLengthErrorBB;

  llvm::IRBuilder<>::InsertPoint SavedIP = Builder.saveIP();

  ThrowLengthErrorBB = createBasicBlock("throw_length_error");
  Builder.SetInsertPoint(ThrowLengthErrorBB);

  // Call to void std::__throw_length_error("length_error");
  const llvm::Type *ResultType = Builder.getVoidTy();
  const llvm::Type *PtrToInt8Ty = Builder.getInt8PtrTy();
  std::vector<const llvm::Type*> ArgTys(1, PtrToInt8Ty);
  llvm::Constant *Fn =
    CGM.CreateRuntimeFunction(llvm::FunctionType::get(ResultType, ArgTys, false),
                              "_ZSt20__throw_length_errorPKc");

  llvm::Value *C = CGM.GetAddrOfConstantCString("length_error");
  C = Builder.CreateStructGEP(C, 0, "arraydecay");
  llvm::CallInst *TheCall = Builder.CreateCall(Fn, C);
  TheCall->setDoesNotReturn();

  Builder.CreateUnreachable();


  Builder.restoreIP(SavedIP);
  return ThrowLengthErrorBB;
}


/// ShouldInstrumentFunction - Return true if the current function should be
/// instrumented with __cyg_profile_func_* calls
bool CodeGenFunction::ShouldInstrumentFunction() {
  if (!CGM.getCodeGenOpts().InstrumentFunctions)
    return false;
  if (CurFuncDecl->hasAttr<NoInstrumentFunctionAttr>())
    return false;
  return true;
}

/// EmitFunctionInstrumentation - Emit LLVM code to call the specified
/// instrumentation function with the current function and the call site, if
/// function instrumentation is enabled.
void CodeGenFunction::EmitFunctionInstrumentation(const char *Fn) {
  if (!ShouldInstrumentFunction())
    return;

  const llvm::PointerType *PointerTy;
  const llvm::FunctionType *FunctionTy;
  std::vector<const llvm::Type*> ProfileFuncArgs;

  // void __cyg_profile_func_{enter,exit} (void *this_fn, void *call_site);
  PointerTy = llvm::Type::getInt8PtrTy(VMContext);
  ProfileFuncArgs.push_back(PointerTy);
  ProfileFuncArgs.push_back(PointerTy);
  FunctionTy = llvm::FunctionType::get(
    llvm::Type::getVoidTy(VMContext),
    ProfileFuncArgs, false);

  llvm::Constant *F = CGM.CreateRuntimeFunction(FunctionTy, Fn);
  llvm::CallInst *CallSite = Builder.CreateCall(
    CGM.getIntrinsic(llvm::Intrinsic::returnaddress, 0, 0),
    llvm::ConstantInt::get(Int32Ty, 0),
    "callsite");

  Builder.CreateCall2(F,
                      llvm::ConstantExpr::getBitCast(CurFn, PointerTy),
                      CallSite);
}

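/// StartFunction - Emit the standard prologue for the function: create the
/// entry block and alloca insertion point, set up the return value slot and
/// debug info, and emit the prologue for the arguments.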
void CodeGenFunction::StartFunction(GlobalDecl GD, QualType RetTy,
                                    llvm::Function *Fn,
                                    const FunctionArgList &Args,
                                    SourceLocation StartLoc) {
  const Decl *D = GD.getDecl();

  DidCallStackSave = false;
  CurCodeDecl = CurFuncDecl = D;
  FnRetTy = RetTy;
  CurFn = Fn;
  assert(CurFn->isDeclaration() && "Function already has body?");

  // Pass inline keyword to optimizer if it appears explicitly on any
  // declaration.
  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(D))
    for (FunctionDecl::redecl_iterator RI = FD->redecls_begin(),
           RE = FD->redecls_end(); RI != RE; ++RI)
      if (RI->isInlineSpecified()) {
        Fn->addFnAttr(llvm::Attribute::InlineHint);
        break;
      }

  llvm::BasicBlock *EntryBB = createBasicBlock("entry", CurFn);

  // Create a marker to make it easy to insert allocas into the entry block
  // later. Don't create this with the builder, because we don't want it
  // folded.
  llvm::Value *Undef = llvm::UndefValue::get(Int32Ty);
  AllocaInsertPt = new llvm::BitCastInst(Undef, Int32Ty, "", EntryBB);
  if (Builder.isNamePreserving())
    AllocaInsertPt->setName("allocapt");

  ReturnBlock = getJumpDestInCurrentScope("return");

  Builder.SetInsertPoint(EntryBB);

  QualType FnType = getContext().getFunctionType(RetTy, 0, 0, false, 0,
                                                 false, false, 0, 0,
                                                 /*FIXME?*/
                                                 FunctionType::ExtInfo());

  // Emit subprogram debug descriptor.
  if (CGDebugInfo *DI = getDebugInfo()) {
    DI->setLocation(StartLoc);
    DI->EmitFunctionStart(GD, FnType, CurFn, Builder);
  }

  EmitFunctionInstrumentation("__cyg_profile_func_enter");

  // FIXME: Leaked.
  // CC info is ignored, hopefully?
  CurFnInfo = &CGM.getTypes().getFunctionInfo(FnRetTy, Args,
                                              FunctionType::ExtInfo());

  if (RetTy->isVoidType()) {
    // Void type; nothing to return.
    ReturnValue = 0;
  } else if (CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
             hasAggregateLLVMType(CurFnInfo->getReturnType())) {
    // Indirect aggregate return; emit returned value directly into sret slot.
    // This reduces code size, and affects correctness in C++.
    ReturnValue = CurFn->arg_begin();
  } else {
    ReturnValue = CreateIRTemp(RetTy, "retval");
  }

  EmitStartEHSpec(CurCodeDecl);
  EmitFunctionProlog(*CurFnInfo, CurFn, Args);

  if (CXXThisDecl)
    CXXThisValue = Builder.CreateLoad(LocalDeclMap[CXXThisDecl], "this");
  if (CXXVTTDecl)
    CXXVTTValue = Builder.CreateLoad(LocalDeclMap[CXXVTTDecl], "vtt");

  // If any of the arguments have a variably modified type, make sure to
  // emit the type size.
  for (FunctionArgList::const_iterator i = Args.begin(), e = Args.end();
       i != e; ++i) {
    QualType Ty = i->second;

    if (Ty->isVariablyModifiedType())
      EmitVLASize(Ty);
  }
}

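/// EmitFunctionBody - Emit the body of the current function declaration as a
/// single statement.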
void CodeGenFunction::EmitFunctionBody(FunctionArgList &Args) {
  const FunctionDecl *FD = cast<FunctionDecl>(CurGD.getDecl());
  assert(FD->getBody());
  EmitStmt(FD->getBody());
}

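/// GenerateCode - Emit LLVM IR for the given function declaration into Fn,
/// synthesizing the implicit 'this' and VTT parameters for C++ methods and
/// dispatching to the constructor/destructor emission paths when needed.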
void CodeGenFunction::GenerateCode(GlobalDecl GD, llvm::Function *Fn) {
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());

  // Check if we should generate debug info for this function.
  if (CGM.getDebugInfo() && !FD->hasAttr<NoDebugAttr>())
    DebugInfo = CGM.getDebugInfo();

  FunctionArgList Args;

  CurGD = GD;
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD)) {
    if (MD->isInstance()) {
      // Create the implicit 'this' decl.
      // FIXME: I'm not entirely sure I like using a fake decl just for code
      // generation. Maybe we can come up with a better way?
      CXXThisDecl = ImplicitParamDecl::Create(getContext(), 0,
                                              FD->getLocation(),
                                              &getContext().Idents.get("this"),
                                              MD->getThisType(getContext()));
      Args.push_back(std::make_pair(CXXThisDecl, CXXThisDecl->getType()));

      // Check if we need a VTT parameter as well.
      if (CodeGenVTables::needsVTTParameter(GD)) {
        // FIXME: The comment about using a fake decl above applies here too.
        QualType T = getContext().getPointerType(getContext().VoidPtrTy);
        CXXVTTDecl =
          ImplicitParamDecl::Create(getContext(), 0, FD->getLocation(),
                                    &getContext().Idents.get("vtt"), T);
        Args.push_back(std::make_pair(CXXVTTDecl, CXXVTTDecl->getType()));
      }
    }
  }

  if (FD->getNumParams()) {
    const FunctionProtoType* FProto = FD->getType()->getAs<FunctionProtoType>();
    assert(FProto && "Function def must have prototype!");

    for (unsigned i = 0, e = FD->getNumParams(); i != e; ++i)
      Args.push_back(std::make_pair(FD->getParamDecl(i),
                                    FProto->getArgType(i)));
  }

  SourceRange BodyRange;
  if (Stmt *Body = FD->getBody()) BodyRange = Body->getSourceRange();

  // Emit the standard function prologue.
  StartFunction(GD, FD->getResultType(), Fn, Args, BodyRange.getBegin());

  // Generate the body of the function.
  if (isa<CXXDestructorDecl>(FD))
    EmitDestructorBody(Args);
  else if (isa<CXXConstructorDecl>(FD))
    EmitConstructorBody(Args);
  else
    EmitFunctionBody(Args);

  // Emit the standard function epilogue.
  FinishFunction(BodyRange.getEnd());

  // Destroy the 'this' declaration.
  if (CXXThisDecl)
    CXXThisDecl->Destroy(getContext());

  // Destroy the VTT declaration.
  if (CXXVTTDecl)
    CXXVTTDecl->Destroy(getContext());
}

/// ContainsLabel - Return true if the statement contains a label in it. If
/// this statement is not executed normally, it not containing a label means
/// that we can just remove the code.
bool CodeGenFunction::ContainsLabel(const Stmt *S, bool IgnoreCaseStmts) {
  // Null statement, not a label!
  if (S == 0) return false;

  // If this is a label, we have to emit the code, consider something like:
  // if (0) { ... foo: bar(); } goto foo;
  if (isa<LabelStmt>(S))
    return true;

  // If this is a case/default statement, and we haven't seen a switch, we have
  // to emit the code.
  if (isa<SwitchCase>(S) && !IgnoreCaseStmts)
    return true;

  // If this is a switch statement, we want to ignore cases below it.
  if (isa<SwitchStmt>(S))
    IgnoreCaseStmts = true;

  // Scan subexpressions for verboten labels.
  for (Stmt::const_child_iterator I = S->child_begin(), E = S->child_end();
       I != E; ++I)
    if (ContainsLabel(*I, IgnoreCaseStmts))
      return true;

  return false;
}


/// ConstantFoldsToSimpleInteger - If the specified expression does not fold to
/// a constant, or if it does but contains a label, return 0. If it constant
/// folds to 'true' and does not contain a label, return 1; if it constant
/// folds to 'false' and does not contain a label, return -1.
int CodeGenFunction::ConstantFoldsToSimpleInteger(const Expr *Cond) {
  // FIXME: Rename and handle conversion of other evaluatable things
  // to bool.
  Expr::EvalResult Result;
  if (!Cond->Evaluate(Result, getContext()) || !Result.Val.isInt() ||
      Result.HasSideEffects)
    return 0;  // Not foldable, not integer or not fully evaluatable.

  if (CodeGenFunction::ContainsLabel(Cond))
    return 0;  // Contains a label.

  return Result.Val.getInt().getBoolValue() ? 1 : -1;
}


/// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an if
/// statement) to the specified blocks. Based on the condition, this might try
/// to simplify the codegen of the conditional based on the branch.
///
void CodeGenFunction::EmitBranchOnBoolExpr(const Expr *Cond,
                                           llvm::BasicBlock *TrueBlock,
                                           llvm::BasicBlock *FalseBlock) {
  if (const ParenExpr *PE = dyn_cast<ParenExpr>(Cond))
    return EmitBranchOnBoolExpr(PE->getSubExpr(), TrueBlock, FalseBlock);

  if (const BinaryOperator *CondBOp = dyn_cast<BinaryOperator>(Cond)) {
    // Handle X && Y in a condition.
    if (CondBOp->getOpcode() == BinaryOperator::LAnd) {
      // If we have "1 && X", simplify the code. "0 && X" would have constant
      // folded if the case was simple enough.
      if (ConstantFoldsToSimpleInteger(CondBOp->getLHS()) == 1) {
        // br(1 && X) -> br(X).
        return EmitBranchOnBoolExpr(CondBOp->getRHS(), TrueBlock, FalseBlock);
      }

      // If we have "X && 1", simplify the code to use an uncond branch.
      // "X && 0" would have been constant folded to 0.
      if (ConstantFoldsToSimpleInteger(CondBOp->getRHS()) == 1) {
        // br(X && 1) -> br(X).
        return EmitBranchOnBoolExpr(CondBOp->getLHS(), TrueBlock, FalseBlock);
      }

      // Emit the LHS as a conditional. If the LHS conditional is false, we
      // want to jump to the FalseBlock.
      llvm::BasicBlock *LHSTrue = createBasicBlock("land.lhs.true");
      EmitBranchOnBoolExpr(CondBOp->getLHS(), LHSTrue, FalseBlock);
      EmitBlock(LHSTrue);

      // Any temporaries created here are conditional.
      BeginConditionalBranch();
      EmitBranchOnBoolExpr(CondBOp->getRHS(), TrueBlock, FalseBlock);
      EndConditionalBranch();

      return;
    } else if (CondBOp->getOpcode() == BinaryOperator::LOr) {
      // If we have "0 || X", simplify the code. "1 || X" would have constant
      // folded if the case was simple enough.
      if (ConstantFoldsToSimpleInteger(CondBOp->getLHS()) == -1) {
        // br(0 || X) -> br(X).
        return EmitBranchOnBoolExpr(CondBOp->getRHS(), TrueBlock, FalseBlock);
      }

      // If we have "X || 0", simplify the code to use an uncond branch.
      // "X || 1" would have been constant folded to 1.
      if (ConstantFoldsToSimpleInteger(CondBOp->getRHS()) == -1) {
        // br(X || 0) -> br(X).
        return EmitBranchOnBoolExpr(CondBOp->getLHS(), TrueBlock, FalseBlock);
      }

      // Emit the LHS as a conditional. If the LHS conditional is true, we
      // want to jump to the TrueBlock.
      llvm::BasicBlock *LHSFalse = createBasicBlock("lor.lhs.false");
      EmitBranchOnBoolExpr(CondBOp->getLHS(), TrueBlock, LHSFalse);
      EmitBlock(LHSFalse);

      // Any temporaries created here are conditional.
      BeginConditionalBranch();
      EmitBranchOnBoolExpr(CondBOp->getRHS(), TrueBlock, FalseBlock);
      EndConditionalBranch();

      return;
    }
  }

  if (const UnaryOperator *CondUOp = dyn_cast<UnaryOperator>(Cond)) {
    // br(!x, t, f) -> br(x, f, t)
    if (CondUOp->getOpcode() == UnaryOperator::LNot)
      return EmitBranchOnBoolExpr(CondUOp->getSubExpr(), FalseBlock, TrueBlock);
  }

  if (const ConditionalOperator *CondOp = dyn_cast<ConditionalOperator>(Cond)) {
    // Handle ?: operator.

    // Just ignore GNU ?: extension.
    if (CondOp->getLHS()) {
      // br(c ? x : y, t, f) -> br(c, br(x, t, f), br(y, t, f))
      llvm::BasicBlock *LHSBlock = createBasicBlock("cond.true");
      llvm::BasicBlock *RHSBlock = createBasicBlock("cond.false");
      EmitBranchOnBoolExpr(CondOp->getCond(), LHSBlock, RHSBlock);
      EmitBlock(LHSBlock);
      EmitBranchOnBoolExpr(CondOp->getLHS(), TrueBlock, FalseBlock);
      EmitBlock(RHSBlock);
      EmitBranchOnBoolExpr(CondOp->getRHS(), TrueBlock, FalseBlock);
      return;
    }
  }

  // Emit the code with the fully general case.
  llvm::Value *CondV = EvaluateExprAsBool(Cond);
  Builder.CreateCondBr(CondV, TrueBlock, FalseBlock);
}

/// ErrorUnsupported - Print out an error that codegen doesn't support the
/// specified stmt yet.
void CodeGenFunction::ErrorUnsupported(const Stmt *S, const char *Type,
                                       bool OmitOnError) {
  CGM.ErrorUnsupported(S, Type, OmitOnError);
}

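/// EmitNullInitialization - Zero-initialize the object at DestPtr: memset it
/// to zero when an all-zero bit pattern is a valid null value for the type,
/// and otherwise copy from a null constant (e.g. for types containing
/// pointers to data members).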
void
CodeGenFunction::EmitNullInitialization(llvm::Value *DestPtr, QualType Ty) {
  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  if (CGM.getTypes().ContainsPointerToDataMember(Ty)) {
    llvm::Constant *NullConstant = CGM.EmitNullConstant(Ty);

    llvm::GlobalVariable *NullVariable =
      new llvm::GlobalVariable(CGM.getModule(), NullConstant->getType(),
                               /*isConstant=*/true,
                               llvm::GlobalVariable::PrivateLinkage,
                               NullConstant, llvm::Twine());
    EmitAggregateCopy(DestPtr, NullVariable, Ty, /*isVolatile=*/false);
    return;
  }


  // Ignore empty classes in C++.
  if (getContext().getLangOptions().CPlusPlus) {
    if (const RecordType *RT = Ty->getAs<RecordType>()) {
      if (cast<CXXRecordDecl>(RT->getDecl())->isEmpty())
        return;
    }
  }

  // Otherwise, just memset the whole thing to zero. This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  const llvm::Type *BP = llvm::Type::getInt8PtrTy(VMContext);
  if (DestPtr->getType() != BP)
    DestPtr = Builder.CreateBitCast(DestPtr, BP, "tmp");

  // Get size and alignment info for this aggregate.
  std::pair<uint64_t, unsigned> TypeInfo = getContext().getTypeInfo(Ty);

  // Don't bother emitting a zero-byte memset.
  if (TypeInfo.first == 0)
    return;

  // FIXME: Handle variable sized types.
  Builder.CreateCall5(CGM.getMemSetFn(BP, IntPtrTy), DestPtr,
                 llvm::Constant::getNullValue(llvm::Type::getInt8Ty(VMContext)),
                      // TypeInfo.first describes size in bits.
                      llvm::ConstantInt::get(IntPtrTy, TypeInfo.first/8),
                      llvm::ConstantInt::get(Int32Ty, TypeInfo.second/8),
                      llvm::ConstantInt::get(llvm::Type::getInt1Ty(VMContext),
                                             0));
}

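/// GetAddrOfLabel - Return the address of the given label as a block address,
/// registering its block as a destination of the function's indirect branch.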
llvm::BlockAddress *CodeGenFunction::GetAddrOfLabel(const LabelStmt *L) {
  // Make sure that there is a block for the indirect goto.
  if (IndirectBranch == 0)
    GetIndirectGotoBlock();

  llvm::BasicBlock *BB = getJumpDestForLabel(L).Block;

  // Make sure the indirect branch includes all of the address-taken blocks.
  IndirectBranch->addDestination(BB);
  return llvm::BlockAddress::get(CurFn, BB);
}

llvm::BasicBlock *CodeGenFunction::GetIndirectGotoBlock() {
  // If we already made the indirect branch for indirect goto, return its block.
  if (IndirectBranch) return IndirectBranch->getParent();

  CGBuilderTy TmpBuilder(createBasicBlock("indirectgoto"));

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(VMContext);

  // Create the PHI node that indirect gotos will add entries to.
  llvm::Value *DestVal = TmpBuilder.CreatePHI(Int8PtrTy, "indirect.goto.dest");

  // Create the indirect branch instruction.
  IndirectBranch = TmpBuilder.CreateIndirectBr(DestVal);
  return IndirectBranch->getParent();
}

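/// GetVLASize - Return the previously computed size of the given variable
/// array type; asserts that EmitVLASize has already been run for it.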
llvm::Value *CodeGenFunction::GetVLASize(const VariableArrayType *VAT) {
  llvm::Value *&SizeEntry = VLASizeMap[VAT->getSizeExpr()];

  assert(SizeEntry && "Did not emit size for type");
  return SizeEntry;
}

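/// EmitVLASize - Compute and cache the runtime size of a variably modified
/// type. Returns the size of the top-level variable array type, or 0 when
/// the variably modified part is nested under an array or pointer type.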
llvm::Value *CodeGenFunction::EmitVLASize(QualType Ty) {
  assert(Ty->isVariablyModifiedType() &&
         "Must pass variably modified type to EmitVLASizes!");

  EnsureInsertPoint();

  if (const VariableArrayType *VAT = getContext().getAsVariableArrayType(Ty)) {
    llvm::Value *&SizeEntry = VLASizeMap[VAT->getSizeExpr()];

    if (!SizeEntry) {
      const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // Get the element size;
      QualType ElemTy = VAT->getElementType();
      llvm::Value *ElemSize;
      if (ElemTy->isVariableArrayType())
        ElemSize = EmitVLASize(ElemTy);
      else
        ElemSize = llvm::ConstantInt::get(SizeTy,
            getContext().getTypeSizeInChars(ElemTy).getQuantity());

      llvm::Value *NumElements = EmitScalarExpr(VAT->getSizeExpr());
      NumElements = Builder.CreateIntCast(NumElements, SizeTy, false, "tmp");

      SizeEntry = Builder.CreateMul(ElemSize, NumElements);
    }

    return SizeEntry;
  }

  if (const ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    EmitVLASize(AT->getElementType());
    return 0;
  }

  const PointerType *PT = Ty->getAs<PointerType>();
  assert(PT && "unknown VM type!");
  EmitVLASize(PT->getPointeeType());
  return 0;
}

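/// EmitVAListRef - Emit a pointer to the given va_list expression, handling
/// targets where the builtin va_list type is an array.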
llvm::Value* CodeGenFunction::EmitVAListRef(const Expr* E) {
  if (CGM.getContext().getBuiltinVaListType()->isArrayType())
    return EmitScalarExpr(E);
  return EmitLValue(E).getAddress();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  EHScopeStack::iterator E = EHStack.find(Old);
  while (EHStack.begin() != E)
    PopCleanupBlock();
}

/// Destroys a cleanup if it was unused.
static void DestroyCleanup(CodeGenFunction &CGF,
                           llvm::BasicBlock *Entry,
                           llvm::BasicBlock *Exit) {
  assert(Entry->use_empty() && "destroying cleanup with uses!");
  assert(Exit->getTerminator() == 0 &&
         "exit has terminator but entry has no predecessors!");

  // This doesn't always remove the entire cleanup, but it's much
  // safer as long as we don't know what blocks belong to the cleanup.
  // A *much* better approach if we care about this inefficiency would
  // be to lazily emit the cleanup.

  // If the exit block is distinct from the entry, give it a branch to
  // an unreachable destination. This preserves the well-formedness
  // of the IR.
  if (Entry != Exit)
    llvm::BranchInst::Create(CGF.getUnreachableBlock(), Exit);

  assert(!Entry->getParent() && "cleanup entry already positioned?");
  // We can't just delete the entry; we have to kill any references to
  // its instructions in other blocks.
  for (llvm::BasicBlock::iterator I = Entry->begin(), E = Entry->end();
       I != E; ++I)
    if (!I->use_empty())
      I->replaceAllUsesWith(llvm::UndefValue::get(I->getType()));
  delete Entry;
}

/// Creates a switch instruction to thread branches out of the given
/// block (which is the exit block of a cleanup).
static void CreateCleanupSwitch(CodeGenFunction &CGF,
                                llvm::BasicBlock *Block) {
  if (Block->getTerminator()) {
    assert(isa<llvm::SwitchInst>(Block->getTerminator()) &&
           "cleanup block already has a terminator, but it isn't a switch");
    return;
  }

  llvm::Value *DestCodePtr
    = CGF.CreateTempAlloca(CGF.Builder.getInt32Ty(), "cleanup.dst");
  CGBuilderTy Builder(Block);
  llvm::Value *DestCode = Builder.CreateLoad(DestCodePtr, "tmp");

  // Create a switch instruction to determine where to jump next.
  Builder.CreateSwitch(DestCode, CGF.getUnreachableBlock());
}

/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup
/// blocks.
static void SimplifyCleanupEntry(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);
}

/// Attempts to reduce a cleanup's exit switch to an unconditional
/// branch.
static void SimplifyCleanupExit(llvm::BasicBlock *Exit) {
  llvm::TerminatorInst *Terminator = Exit->getTerminator();
  assert(Terminator && "completed cleanup exit has no terminator");

  llvm::SwitchInst *Switch = dyn_cast<llvm::SwitchInst>(Terminator);
  if (!Switch) return;
  if (Switch->getNumCases() != 2) return; // default + 1

  llvm::LoadInst *Cond = cast<llvm::LoadInst>(Switch->getCondition());
  llvm::AllocaInst *CondVar = cast<llvm::AllocaInst>(Cond->getPointerOperand());

  // Replace the switch instruction with an unconditional branch.
  llvm::BasicBlock *Dest = Switch->getSuccessor(1); // default is 0
  Switch->eraseFromParent();
  llvm::BranchInst::Create(Dest, Exit);

  // Delete all uses of the condition variable.
  Cond->eraseFromParent();
  while (!CondVar->use_empty())
    cast<llvm::StoreInst>(*CondVar->use_begin())->eraseFromParent();

  // Delete the condition variable itself.
  CondVar->eraseFromParent();
}

/// Threads a branch fixup through a cleanup block.
static void ThreadFixupThroughCleanup(CodeGenFunction &CGF,
                                      BranchFixup &Fixup,
                                      llvm::BasicBlock *Entry,
                                      llvm::BasicBlock *Exit) {
  if (!Exit->getTerminator())
    CreateCleanupSwitch(CGF, Exit);

  // Find the switch and its destination index alloca.
  llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Exit->getTerminator());
  llvm::Value *DestCodePtr =
    cast<llvm::LoadInst>(Switch->getCondition())->getPointerOperand();

  // Compute the index of the new case we're adding to the switch.
  unsigned Index = Switch->getNumCases();

  const llvm::IntegerType *i32 = llvm::Type::getInt32Ty(CGF.getLLVMContext());
  llvm::ConstantInt *IndexV = llvm::ConstantInt::get(i32, Index);

  // Set the index in the origin block.
  new llvm::StoreInst(IndexV, DestCodePtr, Fixup.Origin);

  // Add a case to the switch.
  Switch->addCase(IndexV, Fixup.Destination);

  // Change the last branch to point to the cleanup entry block.
  Fixup.LatestBranch->setSuccessor(Fixup.LatestBranchIndex, Entry);

  // And finally, update the fixup.
  Fixup.LatestBranch = Switch;
  Fixup.LatestBranchIndex = Index;
}

/// Try to simplify both the entry and exit edges of a cleanup.
static void SimplifyCleanupEdges(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry,
                                 llvm::BasicBlock *Exit) {

  // Given their current implementations, it's important to run these
  // in this order: SimplifyCleanupEntry will delete Entry if it can
  // be merged into its predecessor, which will then break
  // SimplifyCleanupExit if (as is common) Entry == Exit.

  SimplifyCleanupExit(Exit);
  SimplifyCleanupEntry(CGF, Entry);
}

static void EmitLazyCleanup(CodeGenFunction &CGF,
                            EHScopeStack::LazyCleanup *Fn,
                            bool ForEH) {
  if (ForEH) CGF.EHStack.pushTerminate();
  Fn->Emit(CGF, ForEH);
  if (ForEH) CGF.EHStack.popTerminate();
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
}

static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
                                    EHScopeStack::LazyCleanup *Fn,
                                    bool ForEH,
                                    llvm::BasicBlock *Entry) {
  assert(Entry && "no entry block for cleanup");

  // Remove the switch and load from the end of the entry block.
  llvm::Instruction *Switch = &Entry->getInstList().back();
  Entry->getInstList().remove(Switch);
  assert(isa<llvm::SwitchInst>(Switch));
  llvm::Instruction *Load = &Entry->getInstList().back();
  Entry->getInstList().remove(Load);
  assert(isa<llvm::LoadInst>(Load));

  assert(Entry->getInstList().empty() &&
         "lazy cleanup block not empty after removing load/switch pair?");

  // Emit the actual cleanup at the end of the entry block.
  CGF.Builder.SetInsertPoint(Entry);
  EmitLazyCleanup(CGF, Fn, ForEH);

  // Put the load and switch at the end of the exit block.
  llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
  Exit->getInstList().push_back(Load);
  Exit->getInstList().push_back(Switch);

  // Clean up the edges if possible.
  SimplifyCleanupEdges(CGF, Entry, Exit);

  CGF.Builder.ClearInsertionPoint();
}

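/// PopLazyCleanupBlock - Pop the innermost lazy cleanup scope off the EH
/// stack, emitting its normal and/or EH cleanup code as required and
/// threading the fallthrough edge and any branch fixups through it.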
static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
  assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
  EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
  assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());

  // Check whether we need an EH cleanup. This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getEHBlock();
  bool RequiresEHCleanup = (EHEntry != 0);

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;

  // - whether control has already been threaded through this cleanup
  llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
  bool HasExistingBranches = (NormalEntry != 0);

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != 0);

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    CGF.EHStack.popCleanup();
    assert(CGF.EHStack.getNumBranchFixups() == 0 ||
           CGF.EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out. Note that SmallVector
  // guarantees maximal alignment for its buffer regardless of its
  // type parameter.
  llvm::SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
  CleanupBuffer.reserve(Scope.getCleanupSize());
  memcpy(CleanupBuffer.data(),
         Scope.getCleanupBuffer(), Scope.getCleanupSize());
  CleanupBuffer.set_size(Scope.getCleanupSize());
  EHScopeStack::LazyCleanup *Fn =
    reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());

  // We're done with the scope; pop it off so we can emit the cleanups.
  CGF.EHStack.popCleanup();

  if (RequiresNormalCleanup) {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasFixups && !HasExistingBranches) {
      EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      if (!HasExistingBranches) {
        NormalEntry = CGF.createBasicBlock("cleanup");
        CreateCleanupSwitch(CGF, NormalEntry);
      }

      CGF.EmitBlock(NormalEntry);

      // Thread the fallthrough edge through the (momentarily trivial)
      // cleanup.
      llvm::BasicBlock *FallthroughDestination = 0;
      if (HasFallthrough) {
        assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
        FallthroughDestination = CGF.createBasicBlock("cleanup.cont");

        BranchFixup Fix;
        Fix.Destination = FallthroughDestination;
        Fix.LatestBranch = FallthroughSource->getTerminator();
        Fix.LatestBranchIndex = 0;
        Fix.Origin = Fix.LatestBranch;

        // Restore fixup invariant. EmitBlock added a branch to the
        // cleanup which we need to redirect to the destination.
        cast<llvm::BranchInst>(Fix.LatestBranch)
          ->setSuccessor(0, Fix.Destination);

        ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
      }

      // Thread any "real" fixups we need to thread.
      for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
           I != E; ++I)
        if (CGF.EHStack.getBranchFixup(I).Destination)
          ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
                                    NormalEntry, NormalEntry);

      SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);

      if (HasFallthrough)
        CGF.EmitBlock(FallthroughDestination);
    }
  }

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
    CGF.EmitBlock(EHEntry);
    SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
    CGF.Builder.restoreIP(SavedIP);
  }
}

/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock() {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  if (isa<EHLazyCleanupScope>(*EHStack.begin()))
    return PopLazyCleanupBlock(*this);

  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Handle the EH cleanup if (1) there is one and (2) it's different
  // from the normal cleanup.
  if (Scope.isEHCleanup() &&
      Scope.getEHEntry() != Scope.getNormalEntry()) {
    llvm::BasicBlock *EHEntry = Scope.getEHEntry();
    llvm::BasicBlock *EHExit = Scope.getEHExit();

    if (EHEntry->use_empty()) {
      DestroyCleanup(*this, EHEntry, EHExit);
    } else {
      // TODO: this isn't really the ideal location to put this EH
      // cleanup, but lazy emission is a better solution than trying
      // to pick a better spot.
      CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
      EmitBlock(EHEntry);
      Builder.restoreIP(SavedIP);

      SimplifyCleanupEdges(*this, EHEntry, EHExit);
    }
  }

  // If we only have an EH cleanup, we don't really need to do much
  // here. Branch fixups just naturally drop down to the enclosing
  // cleanup scope.
  if (!Scope.isNormalCleanup()) {
    EHStack.popCleanup();
    assert(EHStack.getNumBranchFixups() == 0 || EHStack.hasNormalCleanups());
    return;
  }

  // Check whether the scope has any fixups that need to be threaded.
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // Grab the entry and exit blocks.
  llvm::BasicBlock *Entry = Scope.getNormalEntry();
  llvm::BasicBlock *Exit = Scope.getNormalExit();

  // Check whether anything's been threaded through the cleanup already.
  assert((Exit->getTerminator() == 0) == Entry->use_empty() &&
         "cleanup entry/exit mismatch");
  bool HasExistingBranches = !Entry->use_empty();

  // Check whether we need to emit a "fallthrough" branch through the
  // cleanup for the current insertion point.
  llvm::BasicBlock *FallThrough = Builder.GetInsertBlock();
  if (FallThrough && FallThrough->getTerminator())
    FallThrough = 0;

  // If *nothing* is using the cleanup, kill it.
  if (!FallThrough && !HasFixups && !HasExistingBranches) {
    EHStack.popCleanup();
    DestroyCleanup(*this, Entry, Exit);
    return;
  }

  // Otherwise, add the block to the function.
  EmitBlock(Entry);

  if (FallThrough)
    Builder.SetInsertPoint(Exit);
  else
    Builder.ClearInsertionPoint();

  // Fast case: if we don't have to add any fixups, and either
  // we don't have a fallthrough or the cleanup wasn't previously
  // used, then the setup above is sufficient.
  if (!HasFixups) {
    if (!FallThrough) {
      assert(HasExistingBranches && "no reason for cleanup but didn't kill before");
      EHStack.popCleanup();
      SimplifyCleanupEdges(*this, Entry, Exit);
      return;
    } else if (!HasExistingBranches) {
      assert(FallThrough && "no reason for cleanup but didn't kill before");
      // We can't simplify the exit edge in this case because we're
      // already inserting at the end of the exit block.
      EHStack.popCleanup();
      SimplifyCleanupEntry(*this, Entry);
      return;
    }
  }

  // Otherwise we're going to have to thread things through the cleanup.
  llvm::SmallVector<BranchFixup*, 8> Fixups;

  // Synthesize a fixup for the current insertion point.
  BranchFixup Cur;
  if (FallThrough) {
    Cur.Destination = createBasicBlock("cleanup.cont");
    Cur.LatestBranch = FallThrough->getTerminator();
    Cur.LatestBranchIndex = 0;
    Cur.Origin = Cur.LatestBranch;

    // Restore fixup invariant. EmitBlock added a branch to the cleanup
    // which we need to redirect to the destination.
    cast<llvm::BranchInst>(Cur.LatestBranch)->setSuccessor(0, Cur.Destination);

    Fixups.push_back(&Cur);
  } else {
    Cur.Destination = 0;
  }

  // Collect any "real" fixups we need to thread.
  for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
       I != E; ++I)
    if (EHStack.getBranchFixup(I).Destination)
      Fixups.push_back(&EHStack.getBranchFixup(I));

  assert(!Fixups.empty() && "no fixups, invariants broken!");

  // If there's only a single fixup to thread through, do so with
  // unconditional branches. This only happens if there's a single
  // branch and no fallthrough.
  if (Fixups.size() == 1 && !HasExistingBranches) {
    Fixups[0]->LatestBranch->setSuccessor(Fixups[0]->LatestBranchIndex, Entry);
    llvm::BranchInst *Br =
      llvm::BranchInst::Create(Fixups[0]->Destination, Exit);
    Fixups[0]->LatestBranch = Br;
    Fixups[0]->LatestBranchIndex = 0;

  // Otherwise, force a switch statement and thread everything through
  // the switch.
  } else {
    CreateCleanupSwitch(*this, Exit);
    for (unsigned I = 0, E = Fixups.size(); I != E; ++I)
      ThreadFixupThroughCleanup(*this, *Fixups[I], Entry, Exit);
  }

  // Emit the fallthrough destination block if necessary.
  if (Cur.Destination)
    EmitBlock(Cur.Destination);

  // We're finally done with the cleanup.
  EHStack.popCleanup();
}

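/// EmitBranchThroughCleanup - Emit a branch from the current insertion point
/// to the given destination, threading it through any enclosing normal
/// cleanups and recording a branch fixup where the destination scope is not
/// yet known.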
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);

  // If we're not in a cleanup scope, we don't need to worry about
  // fixups.
  if (!EHStack.hasNormalCleanups()) {
    Builder.ClearInsertionPoint();
    return;
  }

  // Initialize a fixup.
  BranchFixup Fixup;
  Fixup.Destination = Dest.Block;
  Fixup.Origin = BI;
  Fixup.LatestBranch = BI;
  Fixup.LatestBranchIndex = 0;

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope.
  if (!Dest.ScopeDepth.isValid()) {
    EHStack.addBranchFixup() = Fixup;
    Builder.ClearInsertionPoint();
    return;
  }

  for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
    if (isa<EHCleanupScope>(*I)) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
      if (Scope.isNormalCleanup())
        ThreadFixupThroughCleanup(*this, Fixup, Scope.getNormalEntry(),
                                  Scope.getNormalExit());
    } else if (isa<EHLazyCleanupScope>(*I)) {
      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
      if (Scope.isNormalCleanup()) {
        llvm::BasicBlock *Block = Scope.getNormalBlock();
        if (!Block) {
          Block = createBasicBlock("cleanup");
          Scope.setNormalBlock(Block);
        }
        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
      }
    }
  }

  Builder.ClearInsertionPoint();
}

1222 if (!HaveInsertPoint())
Anders Carlsson87eaf172009-02-08 00:50:42 +00001223 return;
Mike Stump1eb44332009-09-09 15:08:12 +00001224
John McCallf1549f62010-07-06 01:34:17 +00001225 // Create the branch.
1226 llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);
1227
1228 // If we're not in a cleanup scope, we don't need to worry about
1229 // fixups.
1230 if (!EHStack.hasEHCleanups()) {
1231 Builder.ClearInsertionPoint();
Anders Carlsson87eaf172009-02-08 00:50:42 +00001232 return;
1233 }
Mike Stump1eb44332009-09-09 15:08:12 +00001234
John McCallf1549f62010-07-06 01:34:17 +00001235 // Initialize a fixup.
1236 BranchFixup Fixup;
1237 Fixup.Destination = Dest.Block;
1238 Fixup.Origin = BI;
1239 Fixup.LatestBranch = BI;
1240 Fixup.LatestBranchIndex = 0;
1241
1242 // We should never get invalid scope depths for these: invalid scope
1243 // depths only arise for as-yet-unemitted labels, and we can't do an
1244 // EH-unwind to one of those.
1245 assert(Dest.ScopeDepth.isValid() && "invalid scope depth on EH dest?");
1246
1247 for (EHScopeStack::iterator I = EHStack.begin(),
1248 E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
1249 if (isa<EHCleanupScope>(*I)) {
1250 EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
1251 if (Scope.isEHCleanup())
1252 ThreadFixupThroughCleanup(*this, Fixup, Scope.getEHEntry(),
1253 Scope.getEHExit());
John McCallda65ea82010-07-13 20:32:21 +00001254 } else if (isa<EHLazyCleanupScope>(*I)) {
1255 EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
1256 if (Scope.isEHCleanup()) {
1257 llvm::BasicBlock *Block = Scope.getEHBlock();
1258 if (!Block) {
1259 Block = createBasicBlock("eh.cleanup");
1260 Scope.setEHBlock(Block);
1261 }
1262 ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
1263 }
John McCallf1549f62010-07-06 01:34:17 +00001264 }
Anders Carlsson87eaf172009-02-08 00:50:42 +00001265 }
John McCallf1549f62010-07-06 01:34:17 +00001266
1267 Builder.ClearInsertionPoint();
Anders Carlsson87eaf172009-02-08 00:50:42 +00001268}