Whenever we explicitly activate or deactivate a cleanup, we
need to provide a 'dominating IP': an instruction which is
guaranteed to dominate the (de)activation point and which
cannot be avoided along any execution path between the
push-point of the cleanup and the (de)activation point.
Using the entry block for this is bad mojo.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@144276 91177308-0d34-0410-b5e6-96231b3b80d8
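Callers now have two ways to satisfy this: reuse an instruction that is
already emitted at the push-point (CGBlocks.cpp below reuses the GEP for
the captured variable), or emit a throwaway instruction at the push-point
and erase it once the cleanup has been deactivated (CGExprAgg.cpp,
CGExprCXX.cpp). A minimal sketch of the placeholder variant, assuming it
runs inside a CodeGenFunction member where dtorKind, beginPtr, curPtr and
elementType are already in scope; it mirrors the CGExprCXX.cpp hunk below
rather than adding anything new:

    EHScopeStack::stable_iterator cleanup;
    llvm::Instruction *cleanupDominator = 0;
    if (needsEHCleanup(dtorKind)) {
      pushRegularPartialArrayCleanup(beginPtr, curPtr, elementType,
                                     getDestroyer(dtorKind));
      cleanup = EHStack.stable_begin();
      // A placeholder 'unreachable' emitted at the push-point: it
      // dominates the deactivation below and cannot be bypassed on the
      // way there, so it is a valid dominating IP.
      cleanupDominator = Builder.CreateUnreachable();
    }

    // ... emit the initialization that might throw ...

    if (cleanupDominator) {
      DeactivateCleanupBlock(cleanup, cleanupDominator);
      // The placeholder only existed to serve as an insertion point.
      cleanupDominator->eraseFromParent();
    }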
diff --git a/lib/CodeGen/CGBlocks.cpp b/lib/CodeGen/CGBlocks.cpp
index 9825c1e..e713492 100644
--- a/lib/CodeGen/CGBlocks.cpp
+++ b/lib/CodeGen/CGBlocks.cpp
@@ -27,7 +27,8 @@
CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
: Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
- HasCXXObject(false), UsesStret(false), StructureType(0), Block(block) {
+ HasCXXObject(false), UsesStret(false), StructureType(0), Block(block),
+ DominatingIP(0) {
// Skip asm prefix, if any. 'name' is usually taken directly from
// the mangled name of the enclosing function.
@@ -541,6 +542,10 @@
llvm::Value *addr = CGF.Builder.CreateStructGEP(blockInfo.Address,
capture.getIndex());
+ // We can use that GEP as the dominating IP.
+ if (!blockInfo.DominatingIP)
+ blockInfo.DominatingIP = cast<llvm::Instruction>(addr);
+
CleanupKind cleanupKind = InactiveNormalCleanup;
bool useArrayEHCleanup = CGF.needsEHCleanup(dtorKind);
if (useArrayEHCleanup)
@@ -749,7 +754,7 @@
if (!ci->isByRef()) {
EHScopeStack::stable_iterator cleanup = capture.getCleanup();
if (cleanup.isValid())
- ActivateCleanupBlock(cleanup);
+ ActivateCleanupBlock(cleanup, blockInfo.DominatingIP);
}
}
diff --git a/lib/CodeGen/CGBlocks.h b/lib/CodeGen/CGBlocks.h
index 69f3355..095cfdb 100644
--- a/lib/CodeGen/CGBlocks.h
+++ b/lib/CodeGen/CGBlocks.h
@@ -192,6 +192,15 @@
const BlockExpr *BlockExpression;
CharUnits BlockSize;
CharUnits BlockAlign;
+
+ /// An instruction which dominates the full-expression that the
+ /// block is inside.
+ llvm::Instruction *DominatingIP;
+
+ /// The next block in the block-info chain. Invalid if this block
+ /// info is not part of the CGF's block-info chain, which is true
+ /// if it corresponds to a global block or a block whose expression
+ /// has been encountered.
CGBlockInfo *NextBlockInfo;
const Capture &getCapture(const VarDecl *var) const {
diff --git a/lib/CodeGen/CGCleanup.cpp b/lib/CodeGen/CGCleanup.cpp
index 9e079c6..f9ffc93 100644
--- a/lib/CodeGen/CGCleanup.cpp
+++ b/lib/CodeGen/CGCleanup.cpp
@@ -251,8 +251,7 @@
// Initialize it to false at a site that's guaranteed to be run
// before each evaluation.
- llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
- new llvm::StoreInst(Builder.getFalse(), active, &block->back());
+ setBeforeOutermostConditional(Builder.getFalse(), active);
// Initialize it to true at the current location.
Builder.CreateStore(Builder.getTrue(), active);
@@ -1000,14 +999,15 @@
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
EHScopeStack::stable_iterator C,
- ForActivation_t Kind) {
+ ForActivation_t kind,
+ llvm::Instruction *dominatingIP) {
EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
// We always need the flag if we're activating the cleanup in a
// conditional context, because we have to assume that the current
// location doesn't necessarily dominate the cleanup's code.
bool isActivatedInConditional =
- (Kind == ForActivation && CGF.isInConditionalBranch());
+ (kind == ForActivation && CGF.isInConditionalBranch());
bool needFlag = false;
@@ -1030,32 +1030,44 @@
// If it hasn't yet been used as either, we're done.
if (!needFlag) return;
- llvm::AllocaInst *Var = Scope.getActiveFlag();
- if (!Var) {
- Var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
- Scope.setActiveFlag(Var);
+ llvm::AllocaInst *var = Scope.getActiveFlag();
+ if (!var) {
+ var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
+ Scope.setActiveFlag(var);
+
+ assert(dominatingIP && "no existing variable and no dominating IP!");
// Initialize to true or false depending on whether it was
// active up to this point.
- CGF.InitTempAlloca(Var, CGF.Builder.getInt1(Kind == ForDeactivation));
+ llvm::Value *value = CGF.Builder.getInt1(kind == ForDeactivation);
+
+ // If we're in a conditional block, ignore the dominating IP and
+ // use the outermost conditional branch.
+ if (CGF.isInConditionalBranch()) {
+ CGF.setBeforeOutermostConditional(value, var);
+ } else {
+ new llvm::StoreInst(value, var, dominatingIP);
+ }
}
- CGF.Builder.CreateStore(CGF.Builder.getInt1(Kind == ForActivation), Var);
+ CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}
/// Activate a cleanup that was created in an inactivated state.
-void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C) {
+void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
+ llvm::Instruction *dominatingIP) {
assert(C != EHStack.stable_end() && "activating bottom of stack?");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
assert(!Scope.isActive() && "double activation");
- SetupCleanupBlockActivation(*this, C, ForActivation);
+ SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
Scope.setActive(true);
}
/// Deactivate a cleanup that was created in an active state.
-void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C) {
+void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
+ llvm::Instruction *dominatingIP) {
assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
assert(Scope.isActive() && "double deactivation");
@@ -1071,7 +1083,7 @@
}
// Otherwise, follow the general case.
- SetupCleanupBlockActivation(*this, C, ForDeactivation);
+ SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
Scope.setActive(false);
}
diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp
index 5e4fb98..15fc6a1 100644
--- a/lib/CodeGen/CGException.cpp
+++ b/lib/CodeGen/CGException.cpp
@@ -359,7 +359,7 @@
/*IsInit*/ true);
// Deactivate the cleanup block.
- CGF.DeactivateCleanupBlock(cleanup);
+ CGF.DeactivateCleanupBlock(cleanup, cast<llvm::Instruction>(typedAddr));
}
llvm::Value *CodeGenFunction::getExceptionSlot() {
diff --git a/lib/CodeGen/CGExprAgg.cpp b/lib/CodeGen/CGExprAgg.cpp
index f3e86fb..bc8b081 100644
--- a/lib/CodeGen/CGExprAgg.cpp
+++ b/lib/CodeGen/CGExprAgg.cpp
@@ -687,6 +687,7 @@
QualType::DestructionKind dtorKind = elementType.isDestructedType();
llvm::AllocaInst *endOfInit = 0;
EHScopeStack::stable_iterator cleanup;
+ llvm::Instruction *cleanupDominator = 0;
if (CGF.needsEHCleanup(dtorKind)) {
// In principle we could tell the cleanup where we are more
// directly, but the control flow can get so varied here that it
@@ -694,7 +695,7 @@
// alloca.
endOfInit = CGF.CreateTempAlloca(begin->getType(),
"arrayinit.endOfInit");
- Builder.CreateStore(begin, endOfInit);
+ cleanupDominator = Builder.CreateStore(begin, endOfInit);
CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
CGF.getDestroyer(dtorKind));
cleanup = CGF.EHStack.stable_begin();
@@ -794,7 +795,7 @@
}
// Leave the partial-array cleanup if we entered one.
- if (dtorKind) CGF.DeactivateCleanupBlock(cleanup);
+ if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
return;
}
@@ -843,6 +844,7 @@
// We'll need to enter cleanup scopes in case any of the member
// initializers throw an exception.
SmallVector<EHScopeStack::stable_iterator, 16> cleanups;
+ llvm::Instruction *cleanupDominator = 0;
// Here we iterate over the fields; this makes it simpler to both
// default-initialize fields and skip over unnamed fields.
@@ -886,6 +888,9 @@
= field->getType().isDestructedType()) {
assert(LV.isSimple());
if (CGF.needsEHCleanup(dtorKind)) {
+ if (!cleanupDominator)
+ cleanupDominator = CGF.Builder.CreateUnreachable(); // placeholder
+
CGF.pushDestroy(EHCleanup, LV.getAddress(), field->getType(),
CGF.getDestroyer(dtorKind), false);
cleanups.push_back(CGF.EHStack.stable_begin());
@@ -905,7 +910,11 @@
// Deactivate all the partial cleanups in reverse order, which
// generally means popping them.
for (unsigned i = cleanups.size(); i != 0; --i)
- CGF.DeactivateCleanupBlock(cleanups[i-1]);
+ CGF.DeactivateCleanupBlock(cleanups[i-1], cleanupDominator);
+
+ // Destroy the placeholder if we made one.
+ if (cleanupDominator)
+ cleanupDominator->eraseFromParent();
}
//===----------------------------------------------------------------------===//
diff --git a/lib/CodeGen/CGExprCXX.cpp b/lib/CodeGen/CGExprCXX.cpp
index 78db590..99ebad1 100644
--- a/lib/CodeGen/CGExprCXX.cpp
+++ b/lib/CodeGen/CGExprCXX.cpp
@@ -816,18 +816,22 @@
// Enter a partial-destruction cleanup if necessary.
QualType::DestructionKind dtorKind = elementType.isDestructedType();
EHScopeStack::stable_iterator cleanup;
+ llvm::Instruction *cleanupDominator = 0;
if (needsEHCleanup(dtorKind)) {
pushRegularPartialArrayCleanup(beginPtr, curPtr, elementType,
getDestroyer(dtorKind));
cleanup = EHStack.stable_begin();
+ cleanupDominator = Builder.CreateUnreachable();
}
// Emit the initializer into this element.
StoreAnyExprIntoOneUnit(*this, E, curPtr);
// Leave the cleanup if we entered one.
- if (cleanup != EHStack.stable_end())
- DeactivateCleanupBlock(cleanup);
+ if (cleanup != EHStack.stable_end()) {
+ DeactivateCleanupBlock(cleanup, cleanupDominator);
+ cleanupDominator->eraseFromParent();
+ }
// Advance to the next element.
llvm::Value *nextPtr = Builder.CreateConstGEP1_32(curPtr, 1, "array.next");
@@ -1057,7 +1061,7 @@
DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));
CallDeleteDuringConditionalNew *Cleanup = CGF.EHStack
- .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(InactiveEHCleanup,
+ .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(EHCleanup,
E->getNumPlacementArgs(),
E->getOperatorDelete(),
SavedNewPtr,
@@ -1066,7 +1070,7 @@
Cleanup->setPlacementArg(I,
DominatingValue<RValue>::save(CGF, NewArgs[I+1].RV));
- CGF.ActivateCleanupBlock(CGF.EHStack.stable_begin());
+ CGF.initFullExprCleanup();
}
llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
@@ -1168,10 +1172,12 @@
// If there's an operator delete, enter a cleanup to call it if an
// exception is thrown.
EHScopeStack::stable_iterator operatorDeleteCleanup;
+ llvm::Instruction *cleanupDominator = 0;
if (E->getOperatorDelete() &&
!E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocatorArgs);
operatorDeleteCleanup = EHStack.stable_begin();
+ cleanupDominator = Builder.CreateUnreachable();
}
assert((allocSize == allocSizeWithoutCookie) ==
@@ -1200,8 +1206,10 @@
// Deactivate the 'operator delete' cleanup if we finished
// initialization.
- if (operatorDeleteCleanup.isValid())
- DeactivateCleanupBlock(operatorDeleteCleanup);
+ if (operatorDeleteCleanup.isValid()) {
+ DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
+ cleanupDominator->eraseFromParent();
+ }
if (nullCheck) {
conditional.end(*this);
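The hunk above that switches from pushing an InactiveEHCleanup and calling
ActivateCleanupBlock to pushing an active EHCleanup completed by
initFullExprCleanup covers the conditionally-evaluated case: a
new-expression in one arm of a conditional, where the delete-on-exception
cleanup must also be guarded by whether that arm ran at all, which is
exactly what a conditional full-expression cleanup provides. A
hypothetical source-level example of code that takes this path (not taken
from the patch or its tests):

    struct T { T(int); };
    int mayThrow();

    T *f(bool cond) {
      // The new-expression sits in one arm of the conditional, so the
      // cleanup that calls operator delete if initialization throws is
      // pushed inside a conditional branch and set up via
      // initFullExprCleanup rather than activated against a dominating IP.
      return cond ? new T(mayThrow()) : 0;
    }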
diff --git a/lib/CodeGen/CodeGenFunction.h b/lib/CodeGen/CodeGenFunction.h
index c3aca51..09b8b2b 100644
--- a/lib/CodeGen/CodeGenFunction.h
+++ b/lib/CodeGen/CodeGenFunction.h
@@ -630,10 +630,6 @@
llvm::BasicBlock *getInvokeDestImpl();
- /// Set up the last cleaup that was pushed as a conditional
- /// full-expression cleanup.
- void initFullExprCleanup();
-
template <class T>
typename DominatingValue<T>::saved_type saveValueInCond(T value) {
return DominatingValue<T>::save(*this, value);
@@ -744,6 +740,10 @@
initFullExprCleanup();
}
+ /// Set up the last cleanup that was pushed as a conditional
+ /// full-expression cleanup.
+ void initFullExprCleanup();
+
/// PushDestructorCleanup - Push a cleanup to call the
/// complete-object destructor of an object of the given type at the
/// given address. Does nothing if T is not a C++ class type with a
@@ -763,11 +763,23 @@
/// DeactivateCleanupBlock - Deactivates the given cleanup block.
/// The block cannot be reactivated. Pops it if it's the top of the
/// stack.
- void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
+ ///
+ /// \param DominatingIP - An instruction which is known to
+ /// dominate the current IP (if set) and which lies along
+ /// all paths of execution between the current IP and the
+ /// point at which the cleanup comes into scope.
+ void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
+ llvm::Instruction *DominatingIP);
/// ActivateCleanupBlock - Activates an initially-inactive cleanup.
/// Cannot be used to resurrect a deactivated cleanup.
- void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
+ ///
+ /// \param DominatingIP - An instruction which is known to
+ /// dominate the current IP (if set) and which lies along
+ /// all paths of execution between the current IP and the
+ /// point at which the cleanup comes into scope.
+ void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
+ llvm::Instruction *DominatingIP);
/// \brief Enters a new scope for capturing cleanups, all of which
/// will be executed once the scope is exited.
@@ -923,6 +935,12 @@
/// one branch or the other of a conditional expression.
bool isInConditionalBranch() const { return OutermostConditional != 0; }
+ void setBeforeOutermostConditional(llvm::Value *value, llvm::Value *addr) {
+ assert(isInConditionalBranch());
+ llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
+ new llvm::StoreInst(value, addr, &block->back());
+ }
+
/// An RAII object to record that we're evaluating a statement
/// expression.
class StmtExprEvaluation {