[clang] Re-apply "Emit missing cleanups for stmt-expr" and other commits (PR #89154)
Utkarsh Saxena via cfe-commits
cfe-commits at lists.llvm.org
Mon Apr 29 03:25:17 PDT 2024
https://github.com/usx95 updated https://github.com/llvm/llvm-project/pull/89154
From f1ab4c2677394bbfc985d9680d5eecd7b2e6a882 Mon Sep 17 00:00:00 2001
From: Utkarsh Saxena <usx at google.com>
Date: Wed, 17 Apr 2024 22:47:36 +0000
Subject: [PATCH 1/4] Reapply "[codegen] Emit missing cleanups for stmt-expr
and coro suspensions" and related commits (#88884)
This reverts commit 9d8be2408768912dc113a342050049231e4fc8d1.
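For reference, a minimal sketch of the kind of code affected (modelled on the
ParenInit case in the new control-flow-in-stmt-expr.cpp test; the types and
foo() mirror that test, and the snippet assumes -std=c++20 plus the GNU
statement-expression extension): an early return out of a statement expression
used inside an initializer must still run the destructor of the subobject that
was already constructed, on the normal exit path and not only the exceptional
one.

  struct Printy {
    Printy(const char *name) : name(name) {}
    ~Printy() {}
    const char *name;
  };

  struct Printies {
    Printy a;
    Printy b;
  };

  int foo();

  void ParenInit() {
    // If foo() is nonzero, the early return leaves the statement expression
    // before "b" is constructed; the already-constructed "a" subobject must
    // still be destroyed on this non-exceptional exit.
    Printies ps(Printy("a"), ({
                  if (foo())
                    return;
                  Printy("b");
                }));
  }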
---
clang/lib/CodeGen/CGCall.cpp | 13 +-
clang/lib/CodeGen/CGCleanup.cpp | 49 ++-
clang/lib/CodeGen/CGCleanup.h | 57 ++-
clang/lib/CodeGen/CGDecl.cpp | 61 ++-
clang/lib/CodeGen/CGExpr.cpp | 12 +-
clang/lib/CodeGen/CGExprAgg.cpp | 87 ++--
clang/lib/CodeGen/CGExprCXX.cpp | 38 +-
clang/lib/CodeGen/CodeGenFunction.cpp | 6 +
clang/lib/CodeGen/CodeGenFunction.h | 99 ++++-
.../CodeGenCXX/control-flow-in-stmt-expr.cpp | 409 ++++++++++++++++++
.../coro-suspend-cleanups.cpp | 93 ++++
11 files changed, 796 insertions(+), 128 deletions(-)
create mode 100644 clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
create mode 100644 clang/test/CodeGenCoroutines/coro-suspend-cleanups.cpp
diff --git a/clang/lib/CodeGen/CGCall.cpp b/clang/lib/CodeGen/CGCall.cpp
index 3f5463a9a70e9d..9004e96bc1a0cf 100644
--- a/clang/lib/CodeGen/CGCall.cpp
+++ b/clang/lib/CodeGen/CGCall.cpp
@@ -4693,11 +4693,11 @@ void CodeGenFunction::EmitCallArg(CallArgList &args, const Expr *E,
AggValueSlot Slot = args.isUsingInAlloca()
? createPlaceholderSlot(*this, type) : CreateAggTemp(type, "agg.tmp");
- bool DestroyedInCallee = true, NeedsEHCleanup = true;
+ bool DestroyedInCallee = true, NeedsCleanup = true;
if (const auto *RD = type->getAsCXXRecordDecl())
DestroyedInCallee = RD->hasNonTrivialDestructor();
else
- NeedsEHCleanup = needsEHCleanup(type.isDestructedType());
+ NeedsCleanup = type.isDestructedType();
if (DestroyedInCallee)
Slot.setExternallyDestructed();
@@ -4706,14 +4706,15 @@ void CodeGenFunction::EmitCallArg(CallArgList &args, const Expr *E,
RValue RV = Slot.asRValue();
args.add(RV, type);
- if (DestroyedInCallee && NeedsEHCleanup) {
+ if (DestroyedInCallee && NeedsCleanup) {
// Create a no-op GEP between the placeholder and the cleanup so we can
// RAUW it successfully. It also serves as a marker of the first
// instruction where the cleanup is active.
- pushFullExprCleanup<DestroyUnpassedArg>(EHCleanup, Slot.getAddress(),
- type);
+ pushFullExprCleanup<DestroyUnpassedArg>(NormalAndEHCleanup,
+ Slot.getAddress(), type);
// This unreachable is a temporary marker which will be removed later.
- llvm::Instruction *IsActive = Builder.CreateUnreachable();
+ llvm::Instruction *IsActive =
+ Builder.CreateFlagLoad(llvm::Constant::getNullValue(Int8PtrTy));
args.addArgCleanupDeactivation(EHStack.stable_begin(), IsActive);
}
return;
diff --git a/clang/lib/CodeGen/CGCleanup.cpp b/clang/lib/CodeGen/CGCleanup.cpp
index e6f8e6873004f2..8683f19d9da28e 100644
--- a/clang/lib/CodeGen/CGCleanup.cpp
+++ b/clang/lib/CodeGen/CGCleanup.cpp
@@ -634,12 +634,19 @@ static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
-void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
+void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough,
+ bool ForDeactivation) {
assert(!EHStack.empty() && "cleanup stack is empty!");
assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
+ // If we are deactivating a normal cleanup, we need to pretend that the
+ // fallthrough is unreachable. We restore this IP before returning.
+ CGBuilderTy::InsertPoint NormalDeactivateOrigIP;
+ if (ForDeactivation && (Scope.isNormalCleanup() || !getLangOpts().EHAsynch)) {
+ NormalDeactivateOrigIP = Builder.saveAndClearIP();
+ }
// Remember activation information.
bool IsActive = Scope.isActive();
Address NormalActiveFlag =
@@ -667,7 +674,8 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
// - whether there's a fallthrough
llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
- bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
+ bool HasFallthrough =
+ FallthroughSource != nullptr && (IsActive || HasExistingBranches);
// Branch-through fall-throughs leave the insertion point set to the
// end of the last cleanup, which points to the current scope. The
@@ -692,7 +700,11 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
// If we have a prebranched fallthrough into an inactive normal
// cleanup, rewrite it so that it leads to the appropriate place.
- if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
+ if (Scope.isNormalCleanup() && HasPrebranchedFallthrough &&
+ !RequiresNormalCleanup) {
+ // FIXME: Come up with a program which would need a forwarding prebranched
+ // fallthrough and add tests. Otherwise, delete this and assert against it.
+ assert(!IsActive);
llvm::BasicBlock *prebranchDest;
// If the prebranch is semantically branching through the next
@@ -724,6 +736,8 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
EHStack.popCleanup(); // safe because there are no fixups
assert(EHStack.getNumBranchFixups() == 0 ||
EHStack.hasNormalCleanups());
+ if (NormalDeactivateOrigIP.isSet())
+ Builder.restoreIP(NormalDeactivateOrigIP);
return;
}
@@ -760,11 +774,19 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
if (!RequiresNormalCleanup) {
// Mark CPP scope end for passed-by-value Arg temp
// per Windows ABI which is "normally" Cleanup in callee
- if (IsEHa && getInvokeDest() && Builder.GetInsertBlock()) {
- if (Personality.isMSVCXXPersonality())
+ if (IsEHa && getInvokeDest()) {
+ // If we are deactivating a normal cleanup then we don't have a
+ // fallthrough. Restore the original IP to emit CPP scope ends in the
+ // correct block.
+ if (NormalDeactivateOrigIP.isSet())
+ Builder.restoreIP(NormalDeactivateOrigIP);
+ if (Personality.isMSVCXXPersonality() && Builder.GetInsertBlock())
EmitSehCppScopeEnd();
+ if (NormalDeactivateOrigIP.isSet())
+ NormalDeactivateOrigIP = Builder.saveAndClearIP();
}
destroyOptimisticNormalEntry(*this, Scope);
+ Scope.MarkEmitted();
EHStack.popCleanup();
} else {
// If we have a fallthrough and no other need for the cleanup,
@@ -781,6 +803,7 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
}
destroyOptimisticNormalEntry(*this, Scope);
+ Scope.MarkEmitted();
EHStack.popCleanup();
EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
@@ -916,6 +939,7 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
}
// IV. Pop the cleanup and emit it.
+ Scope.MarkEmitted();
EHStack.popCleanup();
assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
@@ -984,6 +1008,8 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
}
}
+ if (NormalDeactivateOrigIP.isSet())
+ Builder.restoreIP(NormalDeactivateOrigIP);
assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
// Emit the EH cleanup if required.
@@ -1273,17 +1299,8 @@ void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
// to the current RunCleanupsScope.
if (C == EHStack.stable_begin() &&
CurrentCleanupScopeDepth.strictlyEncloses(C)) {
- // Per comment below, checking EHAsynch is not really necessary
- // it's there to assure zero-impact w/o EHAsynch option
- if (!Scope.isNormalCleanup() && getLangOpts().EHAsynch) {
- PopCleanupBlock();
- } else {
- // If it's a normal cleanup, we need to pretend that the
- // fallthrough is unreachable.
- CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
- PopCleanupBlock();
- Builder.restoreIP(SavedIP);
- }
+ PopCleanupBlock(/*FallthroughIsBranchThrough=*/false,
+ /*ForDeactivation=*/true);
return;
}
diff --git a/clang/lib/CodeGen/CGCleanup.h b/clang/lib/CodeGen/CGCleanup.h
index 03e4a29d7b3dbf..c73c97146abc4d 100644
--- a/clang/lib/CodeGen/CGCleanup.h
+++ b/clang/lib/CodeGen/CGCleanup.h
@@ -16,8 +16,11 @@
#include "EHScopeStack.h"
#include "Address.h"
+#include "llvm/ADT/STLExtras.h"
+#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
+#include "llvm/IR/Instruction.h"
namespace llvm {
class BasicBlock;
@@ -266,6 +269,51 @@ class alignas(8) EHCleanupScope : public EHScope {
};
mutable struct ExtInfo *ExtInfo;
+ /// Erases auxiliary allocas and their uses for an unused cleanup.
+ /// Cleanups should mark these allocas as 'used' if the cleanup is
+ /// emitted; otherwise these instructions will be erased.
+ struct AuxillaryAllocas {
+ SmallVector<llvm::Instruction *, 1> AuxAllocas;
+ bool used = false;
+
+ // Records a potentially unused instruction to be erased later.
+ void Add(llvm::AllocaInst *Alloca) { AuxAllocas.push_back(Alloca); }
+
+ // Mark all recorded instructions as used. These will not be erased later.
+ void MarkUsed() {
+ used = true;
+ AuxAllocas.clear();
+ }
+
+ ~AuxillaryAllocas() {
+ if (used)
+ return;
+ llvm::SetVector<llvm::Instruction *> Uses;
+ for (auto *Inst : llvm::reverse(AuxAllocas))
+ CollectUses(Inst, Uses);
+ // Delete uses in the reverse order of insertion.
+ for (auto *I : llvm::reverse(Uses))
+ I->eraseFromParent();
+ }
+
+ private:
+ void CollectUses(llvm::Instruction *I,
+ llvm::SetVector<llvm::Instruction *> &Uses) {
+ if (!I || !Uses.insert(I))
+ return;
+ for (auto *User : I->users())
+ CollectUses(cast<llvm::Instruction>(User), Uses);
+ }
+ };
+ mutable struct AuxillaryAllocas *AuxAllocas;
+
+ AuxillaryAllocas &getAuxillaryAllocas() {
+ if (!AuxAllocas) {
+ AuxAllocas = new struct AuxillaryAllocas();
+ }
+ return *AuxAllocas;
+ }
+
/// The number of fixups required by enclosing scopes (not including
/// this one). If this is the top cleanup scope, all the fixups
/// from this index onwards belong to this scope.
@@ -298,7 +346,7 @@ class alignas(8) EHCleanupScope : public EHScope {
EHScopeStack::stable_iterator enclosingEH)
: EHScope(EHScope::Cleanup, enclosingEH),
EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
- ActiveFlag(Address::invalid()), ExtInfo(nullptr),
+ ActiveFlag(Address::invalid()), ExtInfo(nullptr), AuxAllocas(nullptr),
FixupDepth(fixupDepth) {
CleanupBits.IsNormalCleanup = isNormal;
CleanupBits.IsEHCleanup = isEH;
@@ -312,8 +360,15 @@ class alignas(8) EHCleanupScope : public EHScope {
}
void Destroy() {
+ if (AuxAllocas)
+ delete AuxAllocas;
delete ExtInfo;
}
+ void AddAuxAllocas(llvm::SmallVector<llvm::AllocaInst *> Allocas) {
+ for (auto *Alloca : Allocas)
+ getAuxillaryAllocas().Add(Alloca);
+ }
+ void MarkEmitted() { getAuxillaryAllocas().MarkUsed(); }
// Objects of EHCleanupScope are not destructed. Use Destroy().
~EHCleanupScope() = delete;
diff --git a/clang/lib/CodeGen/CGDecl.cpp b/clang/lib/CodeGen/CGDecl.cpp
index ce6d6d8956076e..3f05ebb561da57 100644
--- a/clang/lib/CodeGen/CGDecl.cpp
+++ b/clang/lib/CodeGen/CGDecl.cpp
@@ -19,6 +19,7 @@
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
+#include "EHScopeStack.h"
#include "PatternInit.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
@@ -2201,6 +2202,27 @@ void CodeGenFunction::pushDestroy(CleanupKind cleanupKind, Address addr,
destroyer, useEHCleanupForArray);
}
+// Pushes a destroy and defers its deactivation until the enclosing
+// CleanupDeactivationScope is exited.
+void CodeGenFunction::pushDestroyAndDeferDeactivation(
+ QualType::DestructionKind dtorKind, Address addr, QualType type) {
+ assert(dtorKind && "cannot push destructor for trivial type");
+
+ CleanupKind cleanupKind = getCleanupKind(dtorKind);
+ pushDestroyAndDeferDeactivation(
+ cleanupKind, addr, type, getDestroyer(dtorKind), cleanupKind & EHCleanup);
+}
+
+void CodeGenFunction::pushDestroyAndDeferDeactivation(
+ CleanupKind cleanupKind, Address addr, QualType type, Destroyer *destroyer,
+ bool useEHCleanupForArray) {
+ llvm::Instruction *DominatingIP =
+ Builder.CreateFlagLoad(llvm::Constant::getNullValue(Int8PtrTy));
+ pushDestroy(cleanupKind, addr, type, destroyer, useEHCleanupForArray);
+ DeferredDeactivationCleanupStack.push_back(
+ {EHStack.stable_begin(), DominatingIP});
+}
+
void CodeGenFunction::pushStackRestore(CleanupKind Kind, Address SPMem) {
EHStack.pushCleanup<CallStackRestore>(Kind, SPMem);
}
@@ -2217,16 +2239,19 @@ void CodeGenFunction::pushLifetimeExtendedDestroy(CleanupKind cleanupKind,
// If we're not in a conditional branch, we don't need to bother generating a
// conditional cleanup.
if (!isInConditionalBranch()) {
- // Push an EH-only cleanup for the object now.
// FIXME: When popping normal cleanups, we need to keep this EH cleanup
// around in case a temporary's destructor throws an exception.
- if (cleanupKind & EHCleanup)
- EHStack.pushCleanup<DestroyObject>(
- static_cast<CleanupKind>(cleanupKind & ~NormalCleanup), addr, type,
- destroyer, useEHCleanupForArray);
+ // Add the cleanup to the EHStack. After the full-expr, this would be
+ // deactivated before being popped from the stack.
+ pushDestroyAndDeferDeactivation(cleanupKind, addr, type, destroyer,
+ useEHCleanupForArray);
+
+ // Since this is lifetime-extended, push it once again to the EHStack after
+ // the full expression.
return pushCleanupAfterFullExprWithActiveFlag<DestroyObject>(
- cleanupKind, Address::invalid(), addr, type, destroyer, useEHCleanupForArray);
+ cleanupKind, Address::invalid(), addr, type, destroyer,
+ useEHCleanupForArray);
}
// Otherwise, we should only destroy the object if it's been initialized.
@@ -2241,13 +2266,12 @@ void CodeGenFunction::pushLifetimeExtendedDestroy(CleanupKind cleanupKind,
Address ActiveFlag = createCleanupActiveFlag();
SavedType SavedAddr = saveValueInCond(addr);
- if (cleanupKind & EHCleanup) {
- EHStack.pushCleanup<ConditionalCleanupType>(
- static_cast<CleanupKind>(cleanupKind & ~NormalCleanup), SavedAddr, type,
- destroyer, useEHCleanupForArray);
- initFullExprCleanupWithFlag(ActiveFlag);
- }
+ pushCleanupAndDeferDeactivation<ConditionalCleanupType>(
+ cleanupKind, SavedAddr, type, destroyer, useEHCleanupForArray);
+ initFullExprCleanupWithFlag(ActiveFlag);
+ // Since this is lifetime-extended, push it once again to the EHStack after
+ // the full expression.
pushCleanupAfterFullExprWithActiveFlag<ConditionalCleanupType>(
cleanupKind, ActiveFlag, SavedAddr, type, destroyer,
useEHCleanupForArray);
@@ -2442,9 +2466,9 @@ namespace {
};
} // end anonymous namespace
-/// pushIrregularPartialArrayCleanup - Push an EH cleanup to destroy
-/// already-constructed elements of the given array. The cleanup
-/// may be popped with DeactivateCleanupBlock or PopCleanupBlock.
+/// pushIrregularPartialArrayCleanup - Push a NormalAndEHCleanup to
+/// destroy already-constructed elements of the given array. The cleanup may be
+/// popped with DeactivateCleanupBlock or PopCleanupBlock.
///
/// \param elementType - the immediate element type of the array;
/// possibly still an array type
@@ -2453,10 +2477,9 @@ void CodeGenFunction::pushIrregularPartialArrayCleanup(llvm::Value *arrayBegin,
QualType elementType,
CharUnits elementAlign,
Destroyer *destroyer) {
- pushFullExprCleanup<IrregularPartialArrayDestroy>(EHCleanup,
- arrayBegin, arrayEndPointer,
- elementType, elementAlign,
- destroyer);
+ pushFullExprCleanup<IrregularPartialArrayDestroy>(
+ NormalAndEHCleanup, arrayBegin, arrayEndPointer, elementType,
+ elementAlign, destroyer);
}
/// pushRegularPartialArrayCleanup - Push an EH cleanup to destroy
diff --git a/clang/lib/CodeGen/CGExpr.cpp b/clang/lib/CodeGen/CGExpr.cpp
index cf696a1c9f560f..c85a339f5e3f88 100644
--- a/clang/lib/CodeGen/CGExpr.cpp
+++ b/clang/lib/CodeGen/CGExpr.cpp
@@ -115,10 +115,16 @@ RawAddress CodeGenFunction::CreateTempAlloca(llvm::Type *Ty, CharUnits Align,
llvm::AllocaInst *CodeGenFunction::CreateTempAlloca(llvm::Type *Ty,
const Twine &Name,
llvm::Value *ArraySize) {
+ llvm::AllocaInst *Alloca;
if (ArraySize)
- return Builder.CreateAlloca(Ty, ArraySize, Name);
- return new llvm::AllocaInst(Ty, CGM.getDataLayout().getAllocaAddrSpace(),
- ArraySize, Name, AllocaInsertPt);
+ Alloca = Builder.CreateAlloca(Ty, ArraySize, Name);
+ else
+ Alloca = new llvm::AllocaInst(Ty, CGM.getDataLayout().getAllocaAddrSpace(),
+ ArraySize, Name, AllocaInsertPt);
+ if (Allocas) {
+ Allocas->Add(Alloca);
+ }
+ return Alloca;
}
/// CreateDefaultAlignTempAlloca - This creates an alloca with the
diff --git a/clang/lib/CodeGen/CGExprAgg.cpp b/clang/lib/CodeGen/CGExprAgg.cpp
index 1b9287ea239347..560a9e2c5ead5c 100644
--- a/clang/lib/CodeGen/CGExprAgg.cpp
+++ b/clang/lib/CodeGen/CGExprAgg.cpp
@@ -15,6 +15,7 @@
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
+#include "EHScopeStack.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
@@ -24,6 +25,7 @@
#include "llvm/IR/Constants.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
+#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
using namespace clang;
@@ -558,24 +560,27 @@ void AggExprEmitter::EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
// For that, we'll need an EH cleanup.
QualType::DestructionKind dtorKind = elementType.isDestructedType();
Address endOfInit = Address::invalid();
- EHScopeStack::stable_iterator cleanup;
- llvm::Instruction *cleanupDominator = nullptr;
- if (CGF.needsEHCleanup(dtorKind)) {
+ CodeGenFunction::CleanupDeactivationScope deactivation(CGF);
+
+ if (dtorKind) {
+ CodeGenFunction::AllocaTrackerRAII allocaTracker(CGF);
// In principle we could tell the cleanup where we are more
// directly, but the control flow can get so varied here that it
// would actually be quite complex. Therefore we go through an
// alloca.
+ llvm::Instruction *dominatingIP =
+ Builder.CreateFlagLoad(llvm::ConstantInt::getNullValue(CGF.Int8PtrTy));
endOfInit = CGF.CreateTempAlloca(begin->getType(), CGF.getPointerAlign(),
"arrayinit.endOfInit");
- cleanupDominator = Builder.CreateStore(begin, endOfInit);
+ Builder.CreateStore(begin, endOfInit);
CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
elementAlign,
CGF.getDestroyer(dtorKind));
- cleanup = CGF.EHStack.stable_begin();
+ cast<EHCleanupScope>(*CGF.EHStack.find(CGF.EHStack.stable_begin()))
+ .AddAuxAllocas(allocaTracker.Take());
- // Otherwise, remember that we didn't need a cleanup.
- } else {
- dtorKind = QualType::DK_none;
+ CGF.DeferredDeactivationCleanupStack.push_back(
+ {CGF.EHStack.stable_begin(), dominatingIP});
}
llvm::Value *one = llvm::ConstantInt::get(CGF.SizeTy, 1);
@@ -671,9 +676,6 @@ void AggExprEmitter::EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
CGF.EmitBlock(endBB);
}
-
- // Leave the partial-array cleanup if we entered one.
- if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
}
//===----------------------------------------------------------------------===//
@@ -1374,9 +1376,8 @@ AggExprEmitter::VisitLambdaExpr(LambdaExpr *E) {
LValue SlotLV = CGF.MakeAddrLValue(Slot.getAddress(), E->getType());
// We'll need to enter cleanup scopes in case any of the element
- // initializers throws an exception.
- SmallVector<EHScopeStack::stable_iterator, 16> Cleanups;
- llvm::Instruction *CleanupDominator = nullptr;
+ // initializers throws an exception or contains a branch out of the expression.
+ CodeGenFunction::CleanupDeactivationScope scope(CGF);
CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
for (LambdaExpr::const_capture_init_iterator i = E->capture_init_begin(),
@@ -1395,28 +1396,12 @@ AggExprEmitter::VisitLambdaExpr(LambdaExpr *E) {
if (QualType::DestructionKind DtorKind =
CurField->getType().isDestructedType()) {
assert(LV.isSimple());
- if (CGF.needsEHCleanup(DtorKind)) {
- if (!CleanupDominator)
- CleanupDominator = CGF.Builder.CreateAlignedLoad(
- CGF.Int8Ty,
- llvm::Constant::getNullValue(CGF.Int8PtrTy),
- CharUnits::One()); // placeholder
-
- CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), CurField->getType(),
- CGF.getDestroyer(DtorKind), false);
- Cleanups.push_back(CGF.EHStack.stable_begin());
- }
+ if (DtorKind)
+ CGF.pushDestroyAndDeferDeactivation(
+ NormalAndEHCleanup, LV.getAddress(CGF), CurField->getType(),
+ CGF.getDestroyer(DtorKind), false);
}
}
-
- // Deactivate all the partial cleanups in reverse order, which
- // generally means popping them.
- for (unsigned i = Cleanups.size(); i != 0; --i)
- CGF.DeactivateCleanupBlock(Cleanups[i-1], CleanupDominator);
-
- // Destroy the placeholder if we made one.
- if (CleanupDominator)
- CleanupDominator->eraseFromParent();
}
void AggExprEmitter::VisitExprWithCleanups(ExprWithCleanups *E) {
@@ -1705,14 +1690,7 @@ void AggExprEmitter::VisitCXXParenListOrInitListExpr(
// We'll need to enter cleanup scopes in case any of the element
// initializers throws an exception.
SmallVector<EHScopeStack::stable_iterator, 16> cleanups;
- llvm::Instruction *cleanupDominator = nullptr;
- auto addCleanup = [&](const EHScopeStack::stable_iterator &cleanup) {
- cleanups.push_back(cleanup);
- if (!cleanupDominator) // create placeholder once needed
- cleanupDominator = CGF.Builder.CreateAlignedLoad(
- CGF.Int8Ty, llvm::Constant::getNullValue(CGF.Int8PtrTy),
- CharUnits::One());
- };
+ CodeGenFunction::CleanupDeactivationScope DeactivateCleanups(CGF);
unsigned curInitIndex = 0;
@@ -1735,10 +1713,8 @@ void AggExprEmitter::VisitCXXParenListOrInitListExpr(
CGF.EmitAggExpr(InitExprs[curInitIndex++], AggSlot);
if (QualType::DestructionKind dtorKind =
- Base.getType().isDestructedType()) {
- CGF.pushDestroy(dtorKind, V, Base.getType());
- addCleanup(CGF.EHStack.stable_begin());
- }
+ Base.getType().isDestructedType())
+ CGF.pushDestroyAndDeferDeactivation(dtorKind, V, Base.getType());
}
}
@@ -1813,10 +1789,10 @@ void AggExprEmitter::VisitCXXParenListOrInitListExpr(
if (QualType::DestructionKind dtorKind
= field->getType().isDestructedType()) {
assert(LV.isSimple());
- if (CGF.needsEHCleanup(dtorKind)) {
- CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), field->getType(),
- CGF.getDestroyer(dtorKind), false);
- addCleanup(CGF.EHStack.stable_begin());
+ if (dtorKind) {
+ CGF.pushDestroyAndDeferDeactivation(
+ NormalAndEHCleanup, LV.getAddress(CGF), field->getType(),
+ CGF.getDestroyer(dtorKind), false);
pushedCleanup = true;
}
}
@@ -1829,17 +1805,6 @@ void AggExprEmitter::VisitCXXParenListOrInitListExpr(
if (GEP->use_empty())
GEP->eraseFromParent();
}
-
- // Deactivate all the partial cleanups in reverse order, which
- // generally means popping them.
- assert((cleanupDominator || cleanups.empty()) &&
- "Missing cleanupDominator before deactivating cleanup blocks");
- for (unsigned i = cleanups.size(); i != 0; --i)
- CGF.DeactivateCleanupBlock(cleanups[i-1], cleanupDominator);
-
- // Destroy the placeholder if we made one.
- if (cleanupDominator)
- cleanupDominator->eraseFromParent();
}
void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
diff --git a/clang/lib/CodeGen/CGExprCXX.cpp b/clang/lib/CodeGen/CGExprCXX.cpp
index a4fb673284ceca..a88b29b326bb92 100644
--- a/clang/lib/CodeGen/CGExprCXX.cpp
+++ b/clang/lib/CodeGen/CGExprCXX.cpp
@@ -1008,8 +1008,8 @@ void CodeGenFunction::EmitNewArrayInitializer(
const Expr *Init = E->getInitializer();
Address EndOfInit = Address::invalid();
QualType::DestructionKind DtorKind = ElementType.isDestructedType();
- EHScopeStack::stable_iterator Cleanup;
- llvm::Instruction *CleanupDominator = nullptr;
+ CleanupDeactivationScope deactivation(*this);
+ bool pushedCleanup = false;
CharUnits ElementSize = getContext().getTypeSizeInChars(ElementType);
CharUnits ElementAlign =
@@ -1105,19 +1105,24 @@ void CodeGenFunction::EmitNewArrayInitializer(
}
// Enter a partial-destruction Cleanup if necessary.
- if (needsEHCleanup(DtorKind)) {
+ if (DtorKind) {
+ AllocaTrackerRAII AllocaTracker(*this);
// In principle we could tell the Cleanup where we are more
// directly, but the control flow can get so varied here that it
// would actually be quite complex. Therefore we go through an
// alloca.
+ llvm::Instruction *DominatingIP =
+ Builder.CreateFlagLoad(llvm::ConstantInt::getNullValue(Int8PtrTy));
EndOfInit = CreateTempAlloca(BeginPtr.getType(), getPointerAlign(),
"array.init.end");
- CleanupDominator =
- Builder.CreateStore(BeginPtr.emitRawPointer(*this), EndOfInit);
pushIrregularPartialArrayCleanup(BeginPtr.emitRawPointer(*this),
EndOfInit, ElementType, ElementAlign,
getDestroyer(DtorKind));
- Cleanup = EHStack.stable_begin();
+ cast<EHCleanupScope>(*EHStack.find(EHStack.stable_begin()))
+ .AddAuxAllocas(AllocaTracker.Take());
+ DeferredDeactivationCleanupStack.push_back(
+ {EHStack.stable_begin(), DominatingIP});
+ pushedCleanup = true;
}
CharUnits StartAlign = CurPtr.getAlignment();
@@ -1164,9 +1169,6 @@ void CodeGenFunction::EmitNewArrayInitializer(
// initialization.
llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
- // If there was a Cleanup, deactivate it.
- if (CleanupDominator)
- DeactivateCleanupBlock(Cleanup, CleanupDominator);
return;
}
@@ -1281,13 +1283,14 @@ void CodeGenFunction::EmitNewArrayInitializer(
Builder.CreateStore(CurPtr.emitRawPointer(*this), EndOfInit);
// Enter a partial-destruction Cleanup if necessary.
- if (!CleanupDominator && needsEHCleanup(DtorKind)) {
- llvm::Value *BeginPtrRaw = BeginPtr.emitRawPointer(*this);
- llvm::Value *CurPtrRaw = CurPtr.emitRawPointer(*this);
- pushRegularPartialArrayCleanup(BeginPtrRaw, CurPtrRaw, ElementType,
+ if (!pushedCleanup && needsEHCleanup(DtorKind)) {
+ llvm::Instruction *DominatingIP =
+ Builder.CreateFlagLoad(llvm::ConstantInt::getNullValue(Int8PtrTy));
+ pushRegularPartialArrayCleanup(BeginPtr.emitRawPointer(*this),
+ CurPtr.emitRawPointer(*this), ElementType,
ElementAlign, getDestroyer(DtorKind));
- Cleanup = EHStack.stable_begin();
- CleanupDominator = Builder.CreateUnreachable();
+ DeferredDeactivationCleanupStack.push_back(
+ {EHStack.stable_begin(), DominatingIP});
}
// Emit the initializer into this element.
@@ -1295,10 +1298,7 @@ void CodeGenFunction::EmitNewArrayInitializer(
AggValueSlot::DoesNotOverlap);
// Leave the Cleanup if we entered one.
- if (CleanupDominator) {
- DeactivateCleanupBlock(Cleanup, CleanupDominator);
- CleanupDominator->eraseFromParent();
- }
+ deactivation.ForceDeactivate();
// Advance to the next element by adjusting the pointer type as necessary.
llvm::Value *NextPtr = Builder.CreateConstInBoundsGEP1_32(
diff --git a/clang/lib/CodeGen/CodeGenFunction.cpp b/clang/lib/CodeGen/CodeGenFunction.cpp
index 86a6ddd80cc114..87766a758311d5 100644
--- a/clang/lib/CodeGen/CodeGenFunction.cpp
+++ b/clang/lib/CodeGen/CodeGenFunction.cpp
@@ -91,6 +91,8 @@ CodeGenFunction::CodeGenFunction(CodeGenModule &cgm, bool suppressNewContext)
CodeGenFunction::~CodeGenFunction() {
assert(LifetimeExtendedCleanupStack.empty() && "failed to emit a cleanup");
+ assert(DeferredDeactivationCleanupStack.empty() &&
+ "missed to deactivate a cleanup");
if (getLangOpts().OpenMP && CurFn)
CGM.getOpenMPRuntime().functionFinished(*this);
@@ -346,6 +348,10 @@ static void EmitIfUsed(CodeGenFunction &CGF, llvm::BasicBlock *BB) {
void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
assert(BreakContinueStack.empty() &&
"mismatched push/pop in break/continue stack!");
+ assert(LifetimeExtendedCleanupStack.empty() &&
+ "mismatched push/pop of cleanups in EHStack!");
+ assert(DeferredDeactivationCleanupStack.empty() &&
+ "mismatched activate/deactivate of cleanups!");
bool OnlySimpleReturnStmts = NumSimpleReturnExprs > 0
&& NumSimpleReturnExprs == NumReturnExprs
diff --git a/clang/lib/CodeGen/CodeGenFunction.h b/clang/lib/CodeGen/CodeGenFunction.h
index ff1873325d409f..d99188671f1f60 100644
--- a/clang/lib/CodeGen/CodeGenFunction.h
+++ b/clang/lib/CodeGen/CodeGenFunction.h
@@ -39,6 +39,7 @@
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Frontend/OpenMP/OMPIRBuilder.h"
+#include "llvm/IR/Instructions.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"
@@ -670,6 +671,51 @@ class CodeGenFunction : public CodeGenTypeCache {
EHScopeStack EHStack;
llvm::SmallVector<char, 256> LifetimeExtendedCleanupStack;
+
+ // A stack of cleanups which were added to the EHStack but have to be
+ // deactivated later, before being popped or emitted. These are usually
+ // deactivated on exiting a `CleanupDeactivationScope`, for instance at the
+ // end of a full-expression.
+ //
+ // These are especially useful for correctly emitting cleanups while
+ // encountering branches out of an expression (through stmt-exprs or
+ // coroutine suspensions).
+ struct DeferredDeactivateCleanup {
+ EHScopeStack::stable_iterator Cleanup;
+ llvm::Instruction *DominatingIP;
+ };
+ llvm::SmallVector<DeferredDeactivateCleanup> DeferredDeactivationCleanupStack;
+
+ // Enters a new scope that captures cleanups whose deactivation has been
+ // deferred; all of them are deactivated once the scope is exited.
+ struct CleanupDeactivationScope {
+ CodeGenFunction &CGF;
+ size_t OldDeactivateCleanupStackSize;
+ bool Deactivated;
+ CleanupDeactivationScope(CodeGenFunction &CGF)
+ : CGF(CGF), OldDeactivateCleanupStackSize(
+ CGF.DeferredDeactivationCleanupStack.size()),
+ Deactivated(false) {}
+
+ void ForceDeactivate() {
+ assert(!Deactivated && "Deactivating already deactivated scope");
+ auto &Stack = CGF.DeferredDeactivationCleanupStack;
+ for (size_t I = Stack.size(); I > OldDeactivateCleanupStackSize; I--) {
+ CGF.DeactivateCleanupBlock(Stack[I - 1].Cleanup,
+ Stack[I - 1].DominatingIP);
+ Stack[I - 1].DominatingIP->eraseFromParent();
+ }
+ Stack.resize(OldDeactivateCleanupStackSize);
+ Deactivated = true;
+ }
+
+ ~CleanupDeactivationScope() {
+ if (Deactivated)
+ return;
+ ForceDeactivate();
+ }
+ };
+
llvm::SmallVector<const JumpDest *, 2> SEHTryEpilogueStack;
llvm::Instruction *CurrentFuncletPad = nullptr;
@@ -875,6 +921,19 @@ class CodeGenFunction : public CodeGenTypeCache {
new (Buffer + sizeof(Header) + sizeof(T)) RawAddress(ActiveFlag);
}
+ // Push a cleanup onto EHStack and deactivate it later. It is usually
+ // deactivated when exiting a `CleanupDeactivationScope` (for example: after a
+ // full expression).
+ template <class T, class... As>
+ void pushCleanupAndDeferDeactivation(CleanupKind Kind, As... A) {
+ // Placeholder dominating IP for this cleanup.
+ llvm::Instruction *DominatingIP =
+ Builder.CreateFlagLoad(llvm::Constant::getNullValue(Int8PtrTy));
+ EHStack.pushCleanup<T>(Kind, A...);
+ DeferredDeactivationCleanupStack.push_back(
+ {EHStack.stable_begin(), DominatingIP});
+ }
+
/// Set up the last cleanup that was pushed as a conditional
/// full-expression cleanup.
void initFullExprCleanup() {
@@ -898,7 +957,8 @@ class CodeGenFunction : public CodeGenTypeCache {
/// PopCleanupBlock - Will pop the cleanup entry on the stack and
/// process all branch fixups.
- void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
+ void PopCleanupBlock(bool FallThroughIsBranchThrough = false,
+ bool ForDeactivation = false);
/// DeactivateCleanupBlock - Deactivates the given cleanup block.
/// The block cannot be reactivated. Pops it if it's the top of the
@@ -926,6 +986,7 @@ class CodeGenFunction : public CodeGenTypeCache {
class RunCleanupsScope {
EHScopeStack::stable_iterator CleanupStackDepth, OldCleanupScopeDepth;
size_t LifetimeExtendedCleanupStackSize;
+ CleanupDeactivationScope DeactivateCleanups;
bool OldDidCallStackSave;
protected:
bool PerformCleanup;
@@ -940,8 +1001,7 @@ class CodeGenFunction : public CodeGenTypeCache {
public:
/// Enter a new cleanup scope.
explicit RunCleanupsScope(CodeGenFunction &CGF)
- : PerformCleanup(true), CGF(CGF)
- {
+ : DeactivateCleanups(CGF), PerformCleanup(true), CGF(CGF) {
CleanupStackDepth = CGF.EHStack.stable_begin();
LifetimeExtendedCleanupStackSize =
CGF.LifetimeExtendedCleanupStack.size();
@@ -971,6 +1031,7 @@ class CodeGenFunction : public CodeGenTypeCache {
void ForceCleanup(std::initializer_list<llvm::Value**> ValuesToReload = {}) {
assert(PerformCleanup && "Already forced cleanup");
CGF.DidCallStackSave = OldDidCallStackSave;
+ DeactivateCleanups.ForceDeactivate();
CGF.PopCleanupBlocks(CleanupStackDepth, LifetimeExtendedCleanupStackSize,
ValuesToReload);
PerformCleanup = false;
@@ -2160,6 +2221,11 @@ class CodeGenFunction : public CodeGenTypeCache {
Address addr, QualType type);
void pushDestroy(CleanupKind kind, Address addr, QualType type,
Destroyer *destroyer, bool useEHCleanupForArray);
+ void pushDestroyAndDeferDeactivation(QualType::DestructionKind dtorKind,
+ Address addr, QualType type);
+ void pushDestroyAndDeferDeactivation(CleanupKind cleanupKind, Address addr,
+ QualType type, Destroyer *destroyer,
+ bool useEHCleanupForArray);
void pushLifetimeExtendedDestroy(CleanupKind kind, Address addr,
QualType type, Destroyer *destroyer,
bool useEHCleanupForArray);
@@ -2698,6 +2764,33 @@ class CodeGenFunction : public CodeGenTypeCache {
TBAAAccessInfo *TBAAInfo = nullptr);
LValue EmitLoadOfPointerLValue(Address Ptr, const PointerType *PtrTy);
+private:
+ struct AllocaTracker {
+ void Add(llvm::AllocaInst *I) { Allocas.push_back(I); }
+ llvm::SmallVector<llvm::AllocaInst *> Take() { return std::move(Allocas); }
+
+ private:
+ llvm::SmallVector<llvm::AllocaInst *> Allocas;
+ };
+ AllocaTracker *Allocas = nullptr;
+
+public:
+ // Captures all the allocas created during the scope of its RAII object.
+ struct AllocaTrackerRAII {
+ AllocaTrackerRAII(CodeGenFunction &CGF)
+ : CGF(CGF), OldTracker(CGF.Allocas) {
+ CGF.Allocas = &Tracker;
+ }
+ ~AllocaTrackerRAII() { CGF.Allocas = OldTracker; }
+
+ llvm::SmallVector<llvm::AllocaInst *> Take() { return Tracker.Take(); }
+
+ private:
+ CodeGenFunction &CGF;
+ AllocaTracker *OldTracker;
+ AllocaTracker Tracker;
+ };
+
/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block if \p ArraySize is nullptr, otherwise inserts it at the current
/// insertion point of the builder. The caller is responsible for setting an
diff --git a/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
new file mode 100644
index 00000000000000..0a51b0e4121c33
--- /dev/null
+++ b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
@@ -0,0 +1,409 @@
+// RUN: %clang_cc1 --std=c++20 -fexceptions -triple x86_64-linux-gnu -emit-llvm %s -o - | FileCheck -check-prefixes=EH %s
+// RUN: %clang_cc1 --std=c++20 -triple x86_64-linux-gnu -emit-llvm %s -o - | FileCheck -check-prefixes=NOEH,CHECK %s
+
+struct Printy {
+ Printy(const char *name) : name(name) {}
+ ~Printy() {}
+ const char *name;
+};
+
+int foo() { return 2; }
+
+struct Printies {
+ Printy a;
+ Printy b;
+ Printy c;
+};
+
+void ParenInit() {
+ // CHECK-LABEL: define dso_local void @_Z9ParenInitv()
+ // CHECK: [[CLEANUP_DEST:%.+]] = alloca i32, align 4
+ Printies ps(Printy("a"),
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ ({
+ if (foo()) return;
+ // CHECK: if.then:
+ // CHECK-NEXT: store i32 1, ptr [[CLEANUP_DEST]], align 4
+ // CHECK-NEXT: br label %cleanup
+ Printy("b");
+ // CHECK: if.end:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ }),
+ ({
+ if (foo()) return;
+ // CHECK: if.then{{.*}}:
+ // CHECK-NEXT: store i32 1, ptr [[CLEANUP_DEST]], align 4
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: br label %cleanup
+ Printy("c");
+ // CHECK: if.end{{.*}}:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZN8PrintiesD1Ev
+ // CHECK-NEXT: br label %return
+ }));
+ // CHECK: cleanup:
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: br label %return
+}
+
+void break_in_stmt_expr() {
+ // Verify that the "break" in "if.then" calls the dtor before jumping to "for.end".
+
+ // CHECK-LABEL: define dso_local void @_Z18break_in_stmt_exprv()
+ Printies p{Printy("a"),
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ ({
+ for (;;) {
+ Printies ps{
+ Printy("b"),
+ // CHECK: for.cond:
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ ({
+ if (foo()) {
+ break;
+ // CHECK: if.then:
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: br label %for.end
+ }
+ Printy("c");
+ // CHECK: if.end:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ }),
+ Printy("d")};
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZN8PrintiesD1Ev
+ // CHECK-NEXT: br label %for.cond
+ }
+ Printy("e");
+ // CHECK: for.end:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ }),
+ Printy("f")};
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZN8PrintiesD1Ev
+}
+
+void goto_in_stmt_expr() {
+ // Verify that:
+ // - branch fixups for deactivated normal cleanups are generated correctly.
+
+ // CHECK-LABEL: define dso_local void @_Z17goto_in_stmt_exprv()
+ // CHECK: [[CLEANUP_DEST_SLOT:%cleanup.dest.slot.*]] = alloca i32, align 4
+ {
+ Printies p1{Printy("a"), // CHECK: call void @_ZN6PrintyC1EPKc
+ ({
+ {
+ Printies p2{Printy("b"),
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ ({
+ if (foo() == 1) {
+ goto in;
+ // CHECK: if.then:
+ // CHECK-NEXT: store i32 2, ptr [[CLEANUP_DEST_SLOT]], align 4
+ // CHECK-NEXT: br label %[[CLEANUP1:.+]]
+ }
+ if (foo() == 2) {
+ goto out;
+ // CHECK: if.then{{.*}}:
+ // CHECK-NEXT: store i32 3, ptr [[CLEANUP_DEST_SLOT]], align 4
+ // CHECK-NEXT: br label %[[CLEANUP1]]
+ }
+ Printy("c");
+ // CHECK: if.end{{.*}}:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ }),
+ Printy("d")};
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZN8PrintiesD1Ev
+ // CHECK-NEXT: br label %in
+
+ }
+ in:
+ Printy("e");
+ // CHECK: in: ; preds = %if.end{{.*}}, %[[CLEANUP1]]
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ }),
+ Printy("f")};
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZN8PrintiesD1Ev
+ // CHECK-NEXT: br label %out
+ }
+out:
+ return;
+ // CHECK: out:
+ // CHECK-NEXT: ret void
+
+ // CHECK: [[CLEANUP1]]: ; preds = %if.then{{.*}}, %if.then
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: %cleanup.dest = load i32, ptr [[CLEANUP_DEST_SLOT]], align 4
+ // CHECK-NEXT: switch i32 %cleanup.dest, label %[[CLEANUP2:.+]] [
+ // CHECK-NEXT: i32 2, label %in
+ // CHECK-NEXT: ]
+
+ // CHECK: [[CLEANUP2]]: ; preds = %[[CLEANUP1]]
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: %cleanup.dest{{.*}} = load i32, ptr [[CLEANUP_DEST_SLOT]], align 4
+ // CHECK-NEXT: switch i32 %cleanup.dest{{.*}}, label %unreachable [
+ // CHECK-NEXT: i32 3, label %out
+ // CHECK-NEXT: ]
+}
+
+void ArrayInit() {
+ // Printy arr[4] = {ctorA, ctorB, stmt-exprC, stmt-exprD};
+ // Verify that:
+ // - We do the necessary stores for array cleanups (endOfInit and last constructed element).
+ // - We update the array init element correctly for ctorA, ctorB and stmt-exprC.
+ // - stmt-exprC and stmt-exprD share the array body dtor code (see %cleanup).
+
+ // CHECK-LABEL: define dso_local void @_Z9ArrayInitv()
+ // CHECK: %arrayinit.endOfInit = alloca ptr, align 8
+ // CHECK: %cleanup.dest.slot = alloca i32, align 4
+ // CHECK: %arrayinit.begin = getelementptr inbounds [4 x %struct.Printy], ptr %arr, i64 0, i64 0
+ // CHECK: store ptr %arrayinit.begin, ptr %arrayinit.endOfInit, align 8
+ Printy arr[4] = {
+ Printy("a"),
+ // CHECK: call void @_ZN6PrintyC1EPKc(ptr noundef nonnull align 8 dereferenceable(8) %arrayinit.begin, ptr noundef @.str)
+ // CHECK: [[ARRAYINIT_ELEMENT1:%.+]] = getelementptr inbounds %struct.Printy, ptr %arrayinit.begin, i64 1
+ // CHECK: store ptr [[ARRAYINIT_ELEMENT1]], ptr %arrayinit.endOfInit, align 8
+ Printy("b"),
+ // CHECK: call void @_ZN6PrintyC1EPKc(ptr noundef nonnull align 8 dereferenceable(8) [[ARRAYINIT_ELEMENT1]], ptr noundef @.str.1)
+ // CHECK: [[ARRAYINIT_ELEMENT2:%.+]] = getelementptr inbounds %struct.Printy, ptr [[ARRAYINIT_ELEMENT1]], i64 1
+ // CHECK: store ptr [[ARRAYINIT_ELEMENT2]], ptr %arrayinit.endOfInit, align 8
+ ({
+ // CHECK: br i1 {{.*}}, label %if.then, label %if.end
+ if (foo()) {
+ return;
+ // CHECK: if.then:
+ // CHECK-NEXT: store i32 1, ptr %cleanup.dest.slot, align 4
+ // CHECK-NEXT: br label %cleanup
+ }
+ // CHECK: if.end:
+ Printy("c");
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: %arrayinit.element2 = getelementptr inbounds %struct.Printy, ptr %arrayinit.element1, i64 1
+ // CHECK-NEXT: store ptr %arrayinit.element2, ptr %arrayinit.endOfInit, align 8
+ }),
+ ({
+ // CHECK: br i1 {{%.+}} label %[[IF_THEN2:.+]], label %[[IF_END2:.+]]
+ if (foo()) {
+ return;
+ // CHECK: [[IF_THEN2]]:
+ // CHECK-NEXT: store i32 1, ptr %cleanup.dest.slot, align 4
+ // CHECK-NEXT: br label %cleanup
+ }
+ // CHECK: [[IF_END2]]:
+ Printy("d");
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: %array.begin = getelementptr inbounds [4 x %struct.Printy], ptr %arr, i32 0, i32 0
+ // CHECK-NEXT: %0 = getelementptr inbounds %struct.Printy, ptr %array.begin, i64 4
+ // CHECK-NEXT: br label %[[ARRAY_DESTROY_BODY1:.+]]
+ }),
+ };
+
+ // CHECK: [[ARRAY_DESTROY_BODY1]]:
+ // CHECK-NEXT: %arraydestroy.elementPast{{.*}} = phi ptr [ %0, %[[IF_END2]] ], [ %arraydestroy.element{{.*}}, %[[ARRAY_DESTROY_BODY1]] ]
+ // CHECK-NEXT: %arraydestroy.element{{.*}} = getelementptr inbounds %struct.Printy, ptr %arraydestroy.elementPast{{.*}}, i64 -1
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: %arraydestroy.done{{.*}} = icmp eq ptr %arraydestroy.element{{.*}}, %array.begin
+ // CHECK-NEXT: br i1 %arraydestroy.done{{.*}}, label %[[ARRAY_DESTROY_DONE1:.+]], label %[[ARRAY_DESTROY_BODY1]]
+
+ // CHECK: [[ARRAY_DESTROY_DONE1]]:
+ // CHECK-NEXT: ret void
+
+ // CHECK: cleanup:
+ // CHECK-NEXT: %1 = load ptr, ptr %arrayinit.endOfInit, align 8
+ // CHECK-NEXT: %arraydestroy.isempty = icmp eq ptr %arrayinit.begin, %1
+ // CHECK-NEXT: br i1 %arraydestroy.isempty, label %[[ARRAY_DESTROY_DONE2:.+]], label %[[ARRAY_DESTROY_BODY2:.+]]
+
+ // CHECK: [[ARRAY_DESTROY_BODY2]]:
+ // CHECK-NEXT: %arraydestroy.elementPast = phi ptr [ %1, %cleanup ], [ %arraydestroy.element, %[[ARRAY_DESTROY_BODY2]] ]
+ // CHECK-NEXT: %arraydestroy.element = getelementptr inbounds %struct.Printy, ptr %arraydestroy.elementPast, i64 -1
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev(ptr noundef nonnull align 8 dereferenceable(8) %arraydestroy.element)
+ // CHECK-NEXT: %arraydestroy.done = icmp eq ptr %arraydestroy.element, %arrayinit.begin
+ // CHECK-NEXT: br i1 %arraydestroy.done, label %[[ARRAY_DESTROY_DONE2]], label %[[ARRAY_DESTROY_BODY2]]
+
+ // CHECK: [[ARRAY_DESTROY_DONE2]]:
+ // CHECK-NEXT: br label %[[ARRAY_DESTROY_DONE1]]
+}
+
+void ArraySubobjects() {
+ struct S {
+ Printy arr1[2];
+ Printy arr2[2];
+ Printy p;
+ };
+ // CHECK-LABEL: define dso_local void @_Z15ArraySubobjectsv()
+ // CHECK: %arrayinit.endOfInit = alloca ptr, align 8
+ S s{{Printy("a"), Printy("b")},
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ {Printy("a"),
+ // CHECK: [[ARRAYINIT_BEGIN:%.+]] = getelementptr inbounds [2 x %struct.Printy]
+ // CHECK: store ptr [[ARRAYINIT_BEGIN]], ptr %arrayinit.endOfInit, align 8
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK: [[ARRAYINIT_ELEMENT:%.+]] = getelementptr inbounds %struct.Printy
+ // CHECK: store ptr [[ARRAYINIT_ELEMENT]], ptr %arrayinit.endOfInit, align 8
+ ({
+ if (foo()) {
+ return;
+ // CHECK: if.then:
+ // CHECK-NEXT: [[V0:%.+]] = load ptr, ptr %arrayinit.endOfInit, align 8
+ // CHECK-NEXT: %arraydestroy.isempty = icmp eq ptr [[ARRAYINIT_BEGIN]], [[V0]]
+ // CHECK-NEXT: br i1 %arraydestroy.isempty, label %[[ARRAY_DESTROY_DONE:.+]], label %[[ARRAY_DESTROY_BODY:.+]]
+ }
+ Printy("b");
+ })
+ },
+ Printy("c")
+ // CHECK: if.end:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: call void @_ZZ15ArraySubobjectsvEN1SD1Ev
+ // CHECK-NEXT: br label %return
+ };
+ // CHECK: return:
+ // CHECK-NEXT: ret void
+
+ // CHECK: [[ARRAY_DESTROY_BODY]]:
+ // CHECK-NEXT: %arraydestroy.elementPast = phi ptr [ %0, %if.then ], [ %arraydestroy.element, %[[ARRAY_DESTROY_BODY]] ]
+ // CHECK-NEXT: %arraydestroy.element = getelementptr inbounds %struct.Printy, ptr %arraydestroy.elementPast, i64 -1
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev(ptr noundef nonnull align 8 dereferenceable(8) %arraydestroy.element)
+ // CHECK-NEXT: %arraydestroy.done = icmp eq ptr %arraydestroy.element, [[ARRAYINIT_BEGIN]]
+ // CHECK-NEXT: br i1 %arraydestroy.done, label %[[ARRAY_DESTROY_DONE]], label %[[ARRAY_DESTROY_BODY]]
+
+ // CHECK: [[ARRAY_DESTROY_DONE]]
+ // CHECK-NEXT: [[ARRAY_BEGIN:%.+]] = getelementptr inbounds [2 x %struct.Printy], ptr %arr1, i32 0, i32 0
+ // CHECK-NEXT: [[V1:%.+]] = getelementptr inbounds %struct.Printy, ptr [[ARRAY_BEGIN]], i64 2
+ // CHECK-NEXT: br label %[[ARRAY_DESTROY_BODY2:.+]]
+
+ // CHECK: [[ARRAY_DESTROY_BODY2]]:
+ // CHECK-NEXT: %arraydestroy.elementPast5 = phi ptr [ %1, %[[ARRAY_DESTROY_DONE]] ], [ %arraydestroy.element6, %[[ARRAY_DESTROY_BODY2]] ]
+ // CHECK-NEXT: %arraydestroy.element6 = getelementptr inbounds %struct.Printy, ptr %arraydestroy.elementPast5, i64 -1
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev(ptr noundef nonnull align 8 dereferenceable(8) %arraydestroy.element6)
+ // CHECK-NEXT: %arraydestroy.done7 = icmp eq ptr %arraydestroy.element6, [[ARRAY_BEGIN]]
+ // CHECK-NEXT: br i1 %arraydestroy.done7, label %[[ARRAY_DESTROY_DONE2:.+]], label %[[ARRAY_DESTROY_BODY2]]
+
+
+ // CHECK: [[ARRAY_DESTROY_DONE2]]:
+ // CHECK-NEXT: br label %return
+}
+
+void LambdaInit() {
+ // CHECK-LABEL: define dso_local void @_Z10LambdaInitv()
+ auto S = [a = Printy("a"), b = ({
+ if (foo()) {
+ return;
+ // CHECK: if.then:
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: br label %return
+ }
+ Printy("b");
+ })]() { return a; };
+}
+
+void LifetimeExtended() {
+ // CHECK-LABEL: define dso_local void @_Z16LifetimeExtendedv
+ struct PrintyRefBind {
+ const Printy &a;
+ const Printy &b;
+ };
+ PrintyRefBind ps = {Printy("a"), ({
+ if (foo()) {
+ return;
+ // CHECK: if.then:
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev
+ // CHECK-NEXT: br label %return
+ }
+ Printy("b");
+ })};
+}
+
+void NewArrayInit() {
+ // CHECK-LABEL: define dso_local void @_Z12NewArrayInitv()
+ // CHECK: %array.init.end = alloca ptr, align 8
+ // CHECK: store ptr %0, ptr %array.init.end, align 8
+ Printy *array = new Printy[3]{
+ "a",
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK: store ptr %array.exp.next, ptr %array.init.end, align 8
+ "b",
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ // CHECK: store ptr %array.exp.next1, ptr %array.init.end, align 8
+ ({
+ if (foo()) {
+ return;
+ // CHECK: if.then:
+ // CHECK: br i1 %arraydestroy.isempty, label %arraydestroy.done{{.*}}, label %arraydestroy.body
+ }
+ "b";
+ // CHECK: if.end:
+ // CHECK: call void @_ZN6PrintyC1EPKc
+ })};
+ // CHECK: arraydestroy.body:
+ // CHECK-NEXT: %arraydestroy.elementPast = phi ptr [ %{{.*}}, %if.then ], [ %arraydestroy.element, %arraydestroy.body ]
+ // CHECK-NEXT: %arraydestroy.element = getelementptr inbounds %struct.Printy, ptr %arraydestroy.elementPast, i64 -1
+ // CHECK-NEXT: call void @_ZN6PrintyD1Ev(ptr noundef nonnull align 8 dereferenceable(8) %arraydestroy.element)
+ // CHECK-NEXT: %arraydestroy.done = icmp eq ptr %arraydestroy.element, %0
+ // CHECK-NEXT: br i1 %arraydestroy.done, label %arraydestroy.done{{.*}}, label %arraydestroy.body
+
+ // CHECK: arraydestroy.done{{.*}}: ; preds = %arraydestroy.body, %if.then
+ // CHECK-NEXT: br label %return
+}
+
+void DestroyInConditionalCleanup() {
+ // EH-LABEL: DestroyInConditionalCleanupv()
+ // NOEH-LABEL: DestroyInConditionalCleanupv()
+ struct A {
+ A() {}
+ ~A() {}
+ };
+
+ struct Value {
+ Value(A) {}
+ ~Value() {}
+ };
+
+ struct V2 {
+ Value K;
+ Value V;
+ };
+ // Verify we use conditional cleanups.
+ (void)(foo() ? V2{A(), A()} : V2{A(), A()});
+ // NOEH: cond.true:
+ // NOEH: call void @_ZZ27DestroyInConditionalCleanupvEN1AC1Ev
+ // NOEH: store ptr %{{.*}}, ptr %cond-cleanup.save
+
+ // EH: cond.true:
+ // EH: invoke void @_ZZ27DestroyInConditionalCleanupvEN1AC1Ev
+ // EH: store ptr %{{.*}}, ptr %cond-cleanup.save
+}
+
+void ArrayInitWithContinue() {
+ // CHECK-LABEL: @_Z21ArrayInitWithContinuev
+ // Verify that we start to emit the array destructor.
+ // CHECK: %arrayinit.endOfInit = alloca ptr, align 8
+ for (int i = 0; i < 1; ++i) {
+ Printy arr[2] = {"a", ({
+ if (foo()) {
+ continue;
+ }
+ "b";
+ })};
+ }
+}
+
+struct [[clang::trivial_abi]] HasTrivialABI {
+ HasTrivialABI();
+ ~HasTrivialABI();
+};
+void AcceptTrivialABI(HasTrivialABI, int);
+void TrivialABI() {
+ // CHECK-LABEL: define dso_local void @_Z10TrivialABIv()
+ AcceptTrivialABI(HasTrivialABI(), ({
+ if (foo()) return;
+ // CHECK: if.then:
+ // CHECK-NEXT: call void @_ZN13HasTrivialABID1Ev
+ // CHECK-NEXT: br label %return
+ 0;
+ }));
+}
diff --git a/clang/test/CodeGenCoroutines/coro-suspend-cleanups.cpp b/clang/test/CodeGenCoroutines/coro-suspend-cleanups.cpp
new file mode 100644
index 00000000000000..06cc2069dbe9ae
--- /dev/null
+++ b/clang/test/CodeGenCoroutines/coro-suspend-cleanups.cpp
@@ -0,0 +1,93 @@
+// RUN: %clang_cc1 --std=c++20 -triple x86_64-linux-gnu -emit-llvm %s -o - | FileCheck %s
+
+#include "Inputs/coroutine.h"
+
+struct Printy {
+ Printy(const char *name) : name(name) {}
+ ~Printy() {}
+ const char *name;
+};
+
+struct coroutine {
+ struct promise_type;
+ std::coroutine_handle<promise_type> handle;
+ ~coroutine() {
+ if (handle) handle.destroy();
+ }
+};
+
+struct coroutine::promise_type {
+ coroutine get_return_object() {
+ return {std::coroutine_handle<promise_type>::from_promise(*this)};
+ }
+ std::suspend_never initial_suspend() noexcept { return {}; }
+ std::suspend_always final_suspend() noexcept { return {}; }
+ void return_void() {}
+ void unhandled_exception() {}
+};
+
+struct Awaiter : std::suspend_always {
+ Printy await_resume() { return {"awaited"}; }
+};
+
+int foo() { return 2; }
+
+coroutine ArrayInitCoro() {
+ // Verify that:
+ // - We do the necessary stores for array cleanups.
+ // - Array cleanups are called by await.cleanup.
+ // - We activate the cleanup after the first element and deactivate it in await.ready (see cleanup.isactive).
+
+ // CHECK-LABEL: define dso_local void @_Z13ArrayInitCorov
+ // CHECK: %arrayinit.endOfInit = alloca ptr, align 8
+ // CHECK: %cleanup.isactive = alloca i1, align 1
+ Printy arr[2] = {
+ Printy("a"),
+ // CHECK: %arrayinit.begin = getelementptr inbounds [2 x %struct.Printy], ptr %arr.reload.addr, i64 0, i64 0
+ // CHECK-NEXT: %arrayinit.begin.spill.addr = getelementptr inbounds %_Z13ArrayInitCorov.Frame, ptr %0, i32 0, i32 10
+ // CHECK-NEXT: store ptr %arrayinit.begin, ptr %arrayinit.begin.spill.addr, align 8
+ // CHECK-NEXT: store i1 true, ptr %cleanup.isactive.reload.addr, align 1
+ // CHECK-NEXT: store ptr %arrayinit.begin, ptr %arrayinit.endOfInit.reload.addr, align 8
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc(ptr noundef nonnull align 8 dereferenceable(8) %arrayinit.begin, ptr noundef @.str)
+ // CHECK-NEXT: %arrayinit.element = getelementptr inbounds %struct.Printy, ptr %arrayinit.begin, i64 1
+ // CHECK-NEXT: %arrayinit.element.spill.addr = getelementptr inbounds %_Z13ArrayInitCorov.Frame, ptr %0, i32 0, i32 11
+ // CHECK-NEXT: store ptr %arrayinit.element, ptr %arrayinit.element.spill.addr, align 8
+ // CHECK-NEXT: store ptr %arrayinit.element, ptr %arrayinit.endOfInit.reload.addr, align 8
+ co_await Awaiter{}
+ // CHECK-NEXT: @_ZNSt14suspend_always11await_readyEv
+ // CHECK-NEXT: br i1 %{{.+}}, label %await.ready, label %CoroSave30
+ };
+ // CHECK: await.cleanup: ; preds = %AfterCoroSuspend{{.*}}
+ // CHECK-NEXT: br label %cleanup{{.*}}.from.await.cleanup
+
+ // CHECK: cleanup{{.*}}.from.await.cleanup: ; preds = %await.cleanup
+ // CHECK: br label %cleanup{{.*}}
+
+ // CHECK: await.ready:
+ // CHECK-NEXT: %arrayinit.element.reload.addr = getelementptr inbounds %_Z13ArrayInitCorov.Frame, ptr %0, i32 0, i32 11
+ // CHECK-NEXT: %arrayinit.element.reload = load ptr, ptr %arrayinit.element.reload.addr, align 8
+ // CHECK-NEXT: call void @_ZN7Awaiter12await_resumeEv
+ // CHECK-NEXT: store i1 false, ptr %cleanup.isactive.reload.addr, align 1
+ // CHECK-NEXT: br label %cleanup{{.*}}.from.await.ready
+
+ // CHECK: cleanup{{.*}}: ; preds = %cleanup{{.*}}.from.await.ready, %cleanup{{.*}}.from.await.cleanup
+ // CHECK: %cleanup.is_active = load i1, ptr %cleanup.isactive.reload.addr, align 1
+ // CHECK-NEXT: br i1 %cleanup.is_active, label %cleanup.action, label %cleanup.done
+
+ // CHECK: cleanup.action:
+ // CHECK: %arraydestroy.isempty = icmp eq ptr %arrayinit.begin.reload{{.*}}, %{{.*}}
+ // CHECK-NEXT: br i1 %arraydestroy.isempty, label %arraydestroy.done{{.*}}, label %arraydestroy.body.from.cleanup.action
+ // Ignore the rest of the array cleanup.
+}
+
+coroutine ArrayInitWithCoReturn() {
+ // CHECK-LABEL: define dso_local void @_Z21ArrayInitWithCoReturnv
+ // Verify that we start to emit the array destructor.
+ // CHECK: %arrayinit.endOfInit = alloca ptr, align 8
+ Printy arr[2] = {"a", ({
+ if (foo()) {
+ co_return;
+ }
+ "b";
+ })};
+}
From d2448b40670b8ba9a79d3691808bfad3c868e9b4 Mon Sep 17 00:00:00 2001
From: Utkarsh Saxena <usx at google.com>
Date: Wed, 17 Apr 2024 22:59:51 +0000
Subject: [PATCH 2/4] Always create active cleanup flag for deactivated normal
cleanups
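This matches the pattern exercised by the new CleanupFlag test below; as a
reduced sketch (types and helpers mirror that test, declarations left undefined
for brevity): the normal cleanup pushed for the first A{} is deactivated at the
end of the full-expression but stays on the EHStack, so a branch out of the
trailing statement expression has to consult a cleanup.isactive flag before
deciding whether to run the destructor.

  struct A {
    A() {}
    ~A() {}
  };

  struct B {
    B(const A &) {}
    ~B() {}
  };

  struct S {
    A a;
    B b;
  };

  int AcceptS(S s);
  void Accept2(int x, int y);
  int foo();

  void InactiveNormalCleanup() {
    // The early return jumps through the deactivated (but not popped) normal
    // cleanup for the first A{}; the "active" flag tells that cleanup whether
    // its destructor should actually run on this path.
    Accept2(AcceptS({.a = A{}, .b = A{}}), ({
              if (foo())
                return;
              0;
            }));
  }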
---
clang/lib/CodeGen/CGCleanup.cpp | 27 ++------
.../CodeGenCXX/control-flow-in-stmt-expr.cpp | 69 +++++++++++++++++++
2 files changed, 74 insertions(+), 22 deletions(-)
diff --git a/clang/lib/CodeGen/CGCleanup.cpp b/clang/lib/CodeGen/CGCleanup.cpp
index 8683f19d9da28e..469e0363b744ac 100644
--- a/clang/lib/CodeGen/CGCleanup.cpp
+++ b/clang/lib/CodeGen/CGCleanup.cpp
@@ -1169,25 +1169,6 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
Builder.ClearInsertionPoint();
}
-static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
- EHScopeStack::stable_iterator C) {
- // If we needed a normal block for any reason, that counts.
- if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
- return true;
-
- // Check whether any enclosed cleanups were needed.
- for (EHScopeStack::stable_iterator
- I = EHStack.getInnermostNormalCleanup();
- I != C; ) {
- assert(C.strictlyEncloses(I));
- EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
- if (S.getNormalBlock()) return true;
- I = S.getEnclosingNormalCleanup();
- }
-
- return false;
-}
-
static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
EHScopeStack::stable_iterator cleanup) {
// If we needed an EH block for any reason, that counts.
@@ -1236,8 +1217,7 @@ static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
// Calculate whether the cleanup was used:
// - as a normal cleanup
- if (Scope.isNormalCleanup() &&
- (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
+ if (Scope.isNormalCleanup()) {
Scope.setTestFlagInNormalCleanup();
needFlag = true;
}
@@ -1250,13 +1230,16 @@ static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
}
// If it hasn't yet been used as either, we're done.
- if (!needFlag) return;
+ if (!needFlag)
+ return;
Address var = Scope.getActiveFlag();
if (!var.isValid()) {
+ CodeGenFunction::AllocaTrackerRAII AllocaTracker(CGF);
var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
"cleanup.isactive");
Scope.setActiveFlag(var);
+ Scope.AddAuxAllocas(AllocaTracker.Take());
assert(dominatingIP && "no existing variable and no dominating IP!");
diff --git a/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
index 0a51b0e4121c33..73098392d50a87 100644
--- a/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
+++ b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
@@ -407,3 +407,72 @@ void TrivialABI() {
0;
}));
}
+
+namespace CleanupFlag {
+struct A {
+ A() {}
+ ~A() {}
+};
+
+struct B {
+ B(const A&) {}
+ B() {}
+ ~B() {}
+};
+
+struct S {
+ A a;
+ B b;
+};
+
+int AcceptS(S s);
+
+void Accept2(int x, int y);
+
+void InactiveNormalCleanup() {
+ // CHECK-LABEL: define {{.*}}InactiveNormalCleanupEv()
+
+ // The cleanup for the first A{} below is an inactive normal cleanup which
+ // is not popped from the EHStack on deactivation. It therefore needs an
+ // "active" cleanup flag.
+
+ // CHECK: [[ACTIVE:%cleanup.isactive.*]] = alloca i1, align 1
+ // CHECK: call void [[A_CTOR:@.*AC1Ev]]
+ // CHECK: store i1 true, ptr [[ACTIVE]], align 1
+ // CHECK: call void [[A_CTOR]]
+ // CHECK: call void [[B_CTOR:@.*BC1ERKNS_1AE]]
+ // CHECK: store i1 false, ptr [[ACTIVE]], align 1
+ // CHECK: call noundef i32 [[ACCEPTS:@.*AcceptSENS_1SE]]
+ Accept2(AcceptS({.a = A{}, .b = A{}}), ({
+ if (foo()) return;
+ // CHECK: if.then:
+ // CHECK: br label %cleanup
+ 0;
+ // CHECK: if.end:
+ // CHECK: call void [[ACCEPT2:@.*Accept2Eii]]
+ // CHECK: br label %cleanup
+ }));
+ // CHECK: cleanup:
+ // CHECK: call void [[S_DTOR:@.*SD1Ev]]
+ // CHECK: call void [[A_DTOR:@.*AD1Ev]]
+ // CHECK: %cleanup.is_active = load i1, ptr [[ACTIVE]]
+ // CHECK: br i1 %cleanup.is_active, label %cleanup.action, label %cleanup.done
+
+ // CHECK: cleanup.action:
+ // CHECK: call void [[A_DTOR]]
+
+ // The "active" cleanup flag is not required for unused cleanups.
+ Accept2(AcceptS({.a = A{}, .b = A{}}), 0);
+ // CHECK: cleanup.cont:
+ // CHECK: call void [[A_CTOR]]
+ // CHECK-NOT: store i1 true
+ // CHECK: call void [[A_CTOR]]
+ // CHECK: call void [[B_CTOR]]
+ // CHECK-NOT: store i1 false
+ // CHECK: call noundef i32 [[ACCEPTS]]
+ // CHECK: call void [[ACCEPT2]]
+ // CHECK: call void [[S_DTOR]]
+ // CHECK: call void [[A_DTOR]]
+ // CHECK: br label %return
+}
+} // namespace CleanupFlag
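
For readers skimming the patch, here is a condensed, standalone sketch of the
scenario the new CleanupFlag test above exercises, with the FileCheck
annotations dropped. The declarations mirror the test and are illustrative
only; with Clang's left-to-right argument evaluation, the aggregate passed to
AcceptS is already constructed when the statement-expression runs, so its
destruction on the early-return path has to be gated by the i1
"cleanup.isactive" flag that this patch now always creates for deactivated
normal cleanups.

  struct A { A() {} ~A() {} };
  struct B { B(const A &) {} B() {} ~B() {} };
  struct S { A a; B b; };

  int AcceptS(S s);
  void Accept2(int x, int y);
  bool foo();

  void Sketch() {
    // The S temporary (and the A temporary bound to B's converting
    // constructor) is constructed before the statement-expression is
    // evaluated. If foo() returns true we leave the full-expression before
    // AcceptS is ever called, and the caller must run those destructors on
    // that path, guarded by the active-flag alloca.
    Accept2(AcceptS({.a = A{}, .b = A{}}), ({
              if (foo())
                return;
              0;
            }));
  }
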
>From adf9bc902baddb156c83ce0f8ec03c142e806d45 Mon Sep 17 00:00:00 2001
From: Utkarsh Saxena <usx at google.com>
Date: Sun, 21 Apr 2024 15:33:38 +0000
Subject: [PATCH 3/4] Fix conditional lifetime extension
---
clang/lib/CodeGen/CGDecl.cpp | 22 +++++---
clang/test/CodeGenCXX/blocks.cpp | 4 +-
.../CodeGenCXX/control-flow-in-stmt-expr.cpp | 52 +++++++++++++++++--
.../test/CodeGenObjC/arc-blocks-exceptions.m | 15 ++++--
clang/test/CodeGenObjC/arc-blocks.m | 4 +-
5 files changed, 77 insertions(+), 20 deletions(-)
diff --git a/clang/lib/CodeGen/CGDecl.cpp b/clang/lib/CodeGen/CGDecl.cpp
index 3f05ebb561da57..9cc67cdbe424b4 100644
--- a/clang/lib/CodeGen/CGDecl.cpp
+++ b/clang/lib/CodeGen/CGDecl.cpp
@@ -36,6 +36,7 @@
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalVariable.h"
+#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Type.h"
#include <optional>
@@ -2255,25 +2256,32 @@ void CodeGenFunction::pushLifetimeExtendedDestroy(CleanupKind cleanupKind,
}
// Otherwise, we should only destroy the object if it's been initialized.
- // Re-use the active flag and saved address across both the EH and end of
- // scope cleanups.
- using SavedType = typename DominatingValue<Address>::saved_type;
using ConditionalCleanupType =
EHScopeStack::ConditionalCleanup<DestroyObject, Address, QualType,
Destroyer *, bool>;
+ DominatingValue<Address>::saved_type SavedAddr = saveValueInCond(addr);
- Address ActiveFlag = createCleanupActiveFlag();
- SavedType SavedAddr = saveValueInCond(addr);
+  // Remember to emit the cleanup if we branch out before the end of the
+  // full-expression (e.g., through a stmt-expr or a coroutine suspension).
+ AllocaTrackerRAII DeactivationAllocas(*this);
+ Address ActiveFlagForDeactivation = createCleanupActiveFlag();
pushCleanupAndDeferDeactivation<ConditionalCleanupType>(
cleanupKind, SavedAddr, type, destroyer, useEHCleanupForArray);
- initFullExprCleanupWithFlag(ActiveFlag);
+ initFullExprCleanupWithFlag(ActiveFlagForDeactivation);
+ EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
+ // Erase the active flag if the cleanup was not emitted.
+ cleanup.AddAuxAllocas(std::move(DeactivationAllocas).Take());
// Since this is lifetime-extended, push it once again to the EHStack after
// the full expression.
+ // The previous active flag would always be 'false' due to forced deferred
+ // deactivation. Use a separate flag for lifetime-extension to correctly
+ // remember if this branch was taken and the object was initialized.
+ Address ActiveFlagForLifetimeExt = createCleanupActiveFlag();
pushCleanupAfterFullExprWithActiveFlag<ConditionalCleanupType>(
- cleanupKind, ActiveFlag, SavedAddr, type, destroyer,
+ cleanupKind, ActiveFlagForLifetimeExt, SavedAddr, type, destroyer,
useEHCleanupForArray);
}
diff --git a/clang/test/CodeGenCXX/blocks.cpp b/clang/test/CodeGenCXX/blocks.cpp
index eaab1890dfc49b..afe07889055398 100644
--- a/clang/test/CodeGenCXX/blocks.cpp
+++ b/clang/test/CodeGenCXX/blocks.cpp
@@ -149,8 +149,8 @@ namespace test5 {
// CHECK-NEXT: [[X:%.*]] = alloca [[A:%.*]], align 4
// CHECK-NEXT: [[B:%.*]] = alloca ptr, align 8
// CHECK-NEXT: [[BLOCK:%.*]] = alloca [[BLOCK_T:.*]], align 8
- // CHECK-NEXT: [[CLEANUP_ACTIVE:%.*]] = alloca i1
// CHECK-NEXT: [[COND_CLEANUP_SAVE:%.*]] = alloca ptr, align 8
+ // CHECK-NEXT: [[CLEANUP_ACTIVE:%.*]] = alloca i1
// CHECK-NEXT: [[T0:%.*]] = zext i1
// CHECK-NEXT: store i8 [[T0]], ptr [[COND]], align 1
// CHECK-NEXT: call void @_ZN5test51AC1Ev(ptr {{[^,]*}} [[X]])
@@ -162,8 +162,8 @@ namespace test5 {
// CHECK-NOT: br
// CHECK: [[CAPTURE:%.*]] = getelementptr inbounds [[BLOCK_T]], ptr [[BLOCK]], i32 0, i32 5
// CHECK-NEXT: call void @_ZN5test51AC1ERKS0_(ptr {{[^,]*}} [[CAPTURE]], ptr noundef nonnull align {{[0-9]+}} dereferenceable({{[0-9]+}}) [[X]])
- // CHECK-NEXT: store i1 true, ptr [[CLEANUP_ACTIVE]]
// CHECK-NEXT: store ptr [[CAPTURE]], ptr [[COND_CLEANUP_SAVE]], align 8
+ // CHECK-NEXT: store i1 true, ptr [[CLEANUP_ACTIVE]]
// CHECK-NEXT: br label
// CHECK: br label
// CHECK: phi
diff --git a/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
index 73098392d50a87..ac466ee5bba40b 100644
--- a/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
+++ b/clang/test/CodeGenCXX/control-flow-in-stmt-expr.cpp
@@ -301,12 +301,21 @@ void LambdaInit() {
})]() { return a; };
}
+struct PrintyRefBind {
+ const Printy &a;
+ const Printy &b;
+};
+
+struct Temp {
+ Temp();
+ ~Temp();
+};
+Temp CreateTemp();
+Printy CreatePrinty();
+Printy CreatePrinty(const Temp&);
+
void LifetimeExtended() {
// CHECK-LABEL: define dso_local void @_Z16LifetimeExtendedv
- struct PrintyRefBind {
- const Printy &a;
- const Printy &b;
- };
PrintyRefBind ps = {Printy("a"), ({
if (foo()) {
return;
@@ -318,6 +327,41 @@ void LifetimeExtended() {
})};
}
+void ConditionalLifetimeExtended() {
+ // CHECK-LABEL: @_Z27ConditionalLifetimeExtendedv()
+
+ // Verify that we create two cleanup flags.
+  // 1. The first for the cleanup which is deactivated after the full-expression.
+  // 2. The second for the life-ext cleanup which is activated only if the branch is taken.
+
+  // Note: We use `CreateTemp()` to ensure that the life-ext destroy cleanup is not at
+  // the top of the EHStack on deactivation. This forces the use of active flags.
+
+ Printy* p1 = nullptr;
+ // CHECK: store i1 false, ptr [[BRANCH1_DEFERRED:%cleanup.cond]], align 1
+ // CHECK-NEXT: store i1 false, ptr [[BRANCH1_LIFEEXT:%cleanup.cond.*]], align 1
+ PrintyRefBind ps = {
+ p1 != nullptr ? static_cast<const Printy&>(CreatePrinty())
+ // CHECK: cond.true:
+ // CHECK-NEXT: call void @_Z12CreatePrintyv
+ // CHECK-NEXT: store i1 true, ptr [[BRANCH1_DEFERRED]], align 1
+ // CHECK-NEXT: store i1 true, ptr [[BRANCH1_LIFEEXT]], align 1
+ // CHECK-NEXT: br label %{{.*}}
+ : foo() ? static_cast<const Printy&>(CreatePrinty(CreateTemp()))
+ : *p1,
+ ({
+ if (foo()) return;
+ Printy("c");
+ // CHECK: if.end:
+ // CHECK-NEXT: call void @_ZN6PrintyC1EPKc
+ // CHECK-NEXT: store ptr
+ })};
+ // CHECK-NEXT: store i1 false, ptr [[BRANCH1_DEFERRED]], align 1
+ // CHECK-NEXT: store i32 0, ptr %cleanup.dest.slot, align 4
+ // CHECK-NEXT: br label %cleanup
+
+}
+
void NewArrayInit() {
// CHECK-LABEL: define dso_local void @_Z12NewArrayInitv()
// CHECK: %array.init.end = alloca ptr, align 8
diff --git a/clang/test/CodeGenObjC/arc-blocks-exceptions.m b/clang/test/CodeGenObjC/arc-blocks-exceptions.m
index 821b818d4027dd..54b043d8ea0753 100644
--- a/clang/test/CodeGenObjC/arc-blocks-exceptions.m
+++ b/clang/test/CodeGenObjC/arc-blocks-exceptions.m
@@ -5,17 +5,22 @@ void test1(_Bool c) {
__weak id weakId = 0;
test1_fn(c ? ^{ (void)weakId; } : 0);
- // CHECK: [[CLEANUP_COND:%.*]] = alloca i1
- // CHECK-NEXT: [[CLEANUP_SAVE:%.*]] = alloca ptr
+ // CHECK: [[CLEANUP_SAVE:%cond-cleanup.save.*]] = alloca ptr
+ // CHECK-NEXT: [[CLEANUP_COND:%.*]] = alloca i1
+ // CHECK-NEXT: [[CLEANUP_COND1:%.*]] = alloca i1
- // CHECK: store i1 true, ptr [[CLEANUP_COND]]
- // CHECK-NEXT: store ptr {{.*}}, ptr [[CLEANUP_SAVE]]
+ // CHECK: store i1 false, ptr [[CLEANUP_COND]]
+ // CHECK-NEXT: store i1 false, ptr [[CLEANUP_COND1]]
+
+ // CHECK: store ptr {{.*}}, ptr [[CLEANUP_SAVE]]
+ // CHECK-NEXT: store i1 true, ptr [[CLEANUP_COND]]
+ // CHECK-NEXT: store i1 true, ptr [[CLEANUP_COND1]]
// CHECK: invoke void @test1_fn(
// CHECK-NEXT: to label %[[INVOKE_CONT:.*]] unwind label %[[LANDING_PAD_LAB:.*]]
// CHECK: [[INVOKE_CONT]]:
- // CHECK-NEXT: [[LOAD:%.*]] = load i1, ptr [[CLEANUP_COND]]
+ // CHECK-NEXT: [[LOAD:%.*]] = load i1, ptr [[CLEANUP_COND1]]
// CHECK-NEXT: br i1 [[LOAD]], label %[[END_OF_SCOPE_LAB:.*]], label
// CHECK: [[END_OF_SCOPE_LAB]]:
diff --git a/clang/test/CodeGenObjC/arc-blocks.m b/clang/test/CodeGenObjC/arc-blocks.m
index 105a72b4af1e1f..f718e8bbf9a615 100644
--- a/clang/test/CodeGenObjC/arc-blocks.m
+++ b/clang/test/CodeGenObjC/arc-blocks.m
@@ -445,8 +445,8 @@ void test13(id x) {
// CHECK: [[X:%.*]] = alloca ptr, align 8
// CHECK-NEXT: [[B:%.*]] = alloca ptr, align 8
// CHECK-NEXT: [[BLOCK:%.*]] = alloca [[BLOCK_T:.*]], align 8
- // CHECK-NEXT: [[CLEANUP_ACTIVE:%.*]] = alloca i1
// CHECK-NEXT: [[COND_CLEANUP_SAVE:%.*]] = alloca ptr,
+ // CHECK-NEXT: [[CLEANUP_ACTIVE:%.*]] = alloca i1
// CHECK-NEXT: [[T0:%.*]] = call ptr @llvm.objc.retain(ptr {{%.*}})
// CHECK-NEXT: store ptr [[T0]], ptr [[X]], align 8
// CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 8, ptr [[B]])
@@ -460,8 +460,8 @@ void test13(id x) {
// CHECK-NEXT: [[T0:%.*]] = load ptr, ptr [[X]], align 8
// CHECK-NEXT: [[T1:%.*]] = call ptr @llvm.objc.retain(ptr [[T0]])
// CHECK-NEXT: store ptr [[T1]], ptr [[CAPTURE]], align 8
- // CHECK-NEXT: store i1 true, ptr [[CLEANUP_ACTIVE]]
// CHECK-NEXT: store ptr [[CAPTURE]], ptr [[COND_CLEANUP_SAVE]], align 8
+ // CHECK-NEXT: store i1 true, ptr [[CLEANUP_ACTIVE]]
// CHECK-NEXT: br label
// CHECK: br label
// CHECK: [[T0:%.*]] = phi ptr
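
The ConditionalLifetimeExtended test above is the heart of this patch. The
following standalone sketch (declarations mirror the test, FileCheck lines
dropped, purely illustrative) restates why two i1 flags are needed: the
deferred-deactivation flag is tested when control branches out of the
full-expression and is forced to false once the full-expression completes,
while a separate lifetime-extension flag records whether the conditional
actually materialized a temporary, so the end-of-scope destructor runs only
when an object was in fact constructed.

  struct Printy { Printy(const char *); ~Printy(); };
  struct Temp { Temp(); ~Temp(); };
  struct PrintyRefBind { const Printy &a; const Printy &b; };

  Temp CreateTemp();
  Printy CreatePrinty();
  Printy CreatePrinty(const Temp &);
  bool foo();

  void Sketch(Printy *p1) {
    PrintyRefBind ps = {
        // Whether a temporary needing destruction is bound to ps.a depends
        // on which branch of the conditional runs; the lifetime-extension
        // flag is set to true only on the branches that construct one.
        p1 != nullptr
            ? static_cast<const Printy &>(CreatePrinty())
            : foo() ? static_cast<const Printy &>(CreatePrinty(CreateTemp()))
                    : *p1,
        ({
          // Returning here leaves the full-expression early; the cleanup
          // guarded by the deferred-deactivation flag still destroys the
          // conditionally created temporary on this path.
          if (foo())
            return;
          Printy("c");
        })};
  }
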
>From de56bc631d63626282f61a0a926e685b66051a0a Mon Sep 17 00:00:00 2001
From: Utkarsh Saxena <usx at google.com>
Date: Mon, 29 Apr 2024 10:23:30 +0000
Subject: [PATCH 4/4] Add release notes
---
clang/docs/ReleaseNotes.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/clang/docs/ReleaseNotes.rst b/clang/docs/ReleaseNotes.rst
index c19ad9fba58f37..d9212548f1305e 100644
--- a/clang/docs/ReleaseNotes.rst
+++ b/clang/docs/ReleaseNotes.rst
@@ -424,6 +424,10 @@ Bug Fixes in This Version
- Fixed an assertion failure on invalid InitListExpr in C89 mode (#GH88008).
+- Fixed missing destructor calls when we branch out of the middle of an expression.
+  This can happen through a branch in a statement expression or in an expression
+  containing a coroutine suspension. Fixes (#GH63818) (#GH88478).
+
Bug Fixes to Compiler Builtins
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
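
For completeness, here is a hypothetical sketch (not taken from the patch or
its tests) of the coroutine-suspension shape of the bug described in the
release note above: a subobject is constructed, the coroutine suspends inside
the same braced initializer, and the frame may then be destroyed without ever
being resumed, so the already-constructed subobject's destructor must still
run. The "coroutine" return type, Awaiter, and Pair below are assumptions made
only for illustration.

  #include <coroutine>

  struct Printy { Printy(const char *); ~Printy(); };

  struct coroutine {
    struct promise_type;
    std::coroutine_handle<promise_type> handle;
  };

  struct coroutine::promise_type {
    coroutine get_return_object() {
      return {std::coroutine_handle<promise_type>::from_promise(*this)};
    }
    std::suspend_always initial_suspend() noexcept { return {}; }
    std::suspend_always final_suspend() noexcept { return {}; }
    void return_void() {}
    void unhandled_exception() {}
  };

  struct Awaiter : std::suspend_always {
    int await_resume() { return 0; }
  };

  struct Pair { Printy p; int x; };

  coroutine Sketch() {
    // Braced-init-list elements are evaluated left to right, so Printy("a")
    // is constructed before the co_await. If the coroutine suspends there
    // and its handle is destroyed without being resumed, ~Printy must still
    // run when the frame is destroyed; before this fix that cleanup could be
    // skipped.
    Pair pair = {Printy("a"), co_await Awaiter{}};
    (void)pair;
  }
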