[clang] df5bee6 - [CIR] Implement TryOp flattening (#183591)
via cfe-commits
cfe-commits at lists.llvm.org
Fri Feb 27 11:20:49 PST 2026
Author: Andy Kaylor
Date: 2026-02-27T11:20:43-08:00
New Revision: df5bee6afc79eb393c6dcbaad1b2ab3665f3a853
URL: https://github.com/llvm/llvm-project/commit/df5bee6afc79eb393c6dcbaad1b2ab3665f3a853
DIFF: https://github.com/llvm/llvm-project/commit/df5bee6afc79eb393c6dcbaad1b2ab3665f3a853.diff
LOG: [CIR] Implement TryOp flattening (#183591)
This updates the FlattenCFG pass to add flattening for cir::TryOp in
cases where the TryOp contains catch or unwind handlers.
Substantial amounts of this PR were created using agentic AI tools, but
I have carefully reviewed the code, comments, and tests and made changes
as needed. I've left intermediate commits in the initial PR if you'd
like to see the progression.
Added:
clang/test/CIR/Transforms/flatten-try-op.cir
Modified:
clang/lib/CIR/Dialect/Transforms/FlattenCFG.cpp
clang/test/CIR/Transforms/flatten-cleanup-scope-nyi.cir
Removed:
################################################################################
diff --git a/clang/lib/CIR/Dialect/Transforms/FlattenCFG.cpp b/clang/lib/CIR/Dialect/Transforms/FlattenCFG.cpp
index 4c13594258465..5ca98bc9a9c7a 100644
--- a/clang/lib/CIR/Dialect/Transforms/FlattenCFG.cpp
+++ b/clang/lib/CIR/Dialect/Transforms/FlattenCFG.cpp
@@ -606,6 +606,79 @@ static cir::AllocaOp getOrCreateCleanupDestSlot(cir::FuncOp funcOp,
return allocaOp;
}
+/// Shared EH flattening utilities used by both CIRCleanupScopeOpFlattening
+/// and CIRTryOpFlattening.
+
+// Collect all function calls in a region that may throw exceptions and need
+// to be replaced with try_call operations. Skips calls marked nothrow.
+// Nested cleanup scopes and try ops are always flattened before their
+// enclosing parents, so there are no nested regions to skip here.
+static void
+collectThrowingCalls(mlir::Region ®ion,
+ llvm::SmallVectorImpl<cir::CallOp> &callsToRewrite) {
+ region.walk([&](cir::CallOp callOp) {
+ if (!callOp.getNothrow())
+ callsToRewrite.push_back(callOp);
+ });
+}
+
+// Collect all cir.resume operations in a region that come from
+// already-flattened try or cleanup scope operations. These resume ops need
+// to be chained through this scope's EH handler instead of unwinding
+// directly to the caller. Nested cleanup scopes and try ops are always
+// flattened before their enclosing parents, so there are no nested regions
+// to skip here.
+static void collectResumeOps(mlir::Region ®ion,
+ llvm::SmallVectorImpl<cir::ResumeOp> &resumeOps) {
+ region.walk([&](cir::ResumeOp resumeOp) { resumeOps.push_back(resumeOp); });
+}
+
+// Replace a cir.call with a cir.try_call that unwinds to the `unwindDest`
+// block if an exception is thrown.
+static void replaceCallWithTryCall(cir::CallOp callOp, mlir::Block *unwindDest,
+ mlir::Location loc,
+ mlir::PatternRewriter &rewriter) {
+ mlir::Block *callBlock = callOp->getBlock();
+
+ assert(!callOp.getNothrow() && "call is not expected to throw");
+
+ // Split the block after the call - remaining ops become the normal
+ // destination.
+ mlir::Block *normalDest =
+ rewriter.splitBlock(callBlock, std::next(callOp->getIterator()));
+
+ // Build the try_call to replace the original call.
+ rewriter.setInsertionPoint(callOp);
+ mlir::Type resType = callOp->getNumResults() > 0
+ ? callOp->getResult(0).getType()
+ : mlir::Type();
+ auto tryCallOp =
+ cir::TryCallOp::create(rewriter, loc, callOp.getCalleeAttr(), resType,
+ normalDest, unwindDest, callOp.getArgOperands());
+
+ // Replace uses of the call result with the try_call result.
+ if (callOp->getNumResults() > 0)
+ callOp->getResult(0).replaceAllUsesWith(tryCallOp.getResult());
+
+ rewriter.eraseOp(callOp);
+}
+
+// Create a shared unwind destination block. The block contains a
+// cir.eh.initiate operation (optionally with the cleanup attribute) and a
+// branch to the given destination block, passing the eh_token.
+static mlir::Block *buildUnwindBlock(mlir::Block *dest, bool hasCleanup,
+ mlir::Location loc,
+ mlir::Block *insertBefore,
+ mlir::PatternRewriter &rewriter) {
+ mlir::Block *unwindBlock = rewriter.createBlock(insertBefore);
+ rewriter.setInsertionPointToEnd(unwindBlock);
+ auto ehInitiate =
+ cir::EhInitiateOp::create(rewriter, loc, /*cleanup=*/hasCleanup);
+ cir::BrOp::create(rewriter, loc, mlir::ValueRange{ehInitiate.getEhToken()},
+ dest);
+ return unwindBlock;
+}
+
class CIRCleanupScopeOpFlattening
: public mlir::OpRewritePattern<cir::CleanupScopeOp> {
public:
@@ -883,45 +956,6 @@ class CIRCleanupScopeOpFlattening
});
}
- // Collect all cir.resume operations in the body region that come from
- // already-flattened try or cleanup scope operations that were nested within
- // this cleanup scope. These resume ops need to be chained through this
- // cleanup's EH handler instead of unwinding directly to the caller.
- void collectResumeOps(mlir::Region &bodyRegion,
- llvm::SmallVectorImpl<cir::ResumeOp> &resumeOps) const {
- bodyRegion.walk<mlir::WalkOrder::PreOrder>([&](mlir::Operation *op) {
- // Skip resume ops inside nested TryOps - those are handled by TryOp
- // flattening.
- if (isa<cir::TryOp>(op))
- return mlir::WalkResult::skip();
-
- if (auto resumeOp = dyn_cast<cir::ResumeOp>(op))
- resumeOps.push_back(resumeOp);
- return mlir::WalkResult::advance();
- });
- }
-
- // Collect all function calls in the cleanup scope body that may throw
- // exceptions and need to be replaced with try_call operations. Skips calls
- // that are marked nothrow and calls inside nested TryOps (the latter will be
- // handled by the TryOp's own flattening).
- void collectThrowingCalls(
- mlir::Region &bodyRegion,
- llvm::SmallVectorImpl<cir::CallOp> &callsToRewrite) const {
- bodyRegion.walk<mlir::WalkOrder::PreOrder>([&](mlir::Operation *op) {
- // Skip calls inside nested TryOps - those are handled by TryOp
- // flattening.
- if (isa<cir::TryOp>(op))
- return mlir::WalkResult::skip();
-
- if (auto callOp = dyn_cast<cir::CallOp>(op)) {
- if (!callOp.getNothrow())
- callsToRewrite.push_back(callOp);
- }
- return mlir::WalkResult::advance();
- });
- }
-
#ifndef NDEBUG
// Check that no block other than the last one in a region exits the region.
static bool regionExitsOnlyFromLastBlock(mlir::Region ®ion) {
@@ -1051,51 +1085,6 @@ class CIRCleanupScopeOpFlattening
return clonedEntry;
}
- // Create a shared unwind destination block for all calls within the same
- // cleanup scope. The unwind block contains a cir.eh.initiate operation
- // (with the cleanup attribute) and a branch to the EH cleanup block.
- mlir::Block *buildUnwindBlock(mlir::Block *ehCleanupBlock, mlir::Location loc,
- mlir::Block *insertBefore,
- mlir::PatternRewriter &rewriter) const {
- mlir::Block *unwindBlock = rewriter.createBlock(insertBefore);
- rewriter.setInsertionPointToEnd(unwindBlock);
- auto ehInitiate =
- cir::EhInitiateOp::create(rewriter, loc, /*cleanup=*/true);
- cir::BrOp::create(rewriter, loc, mlir::ValueRange{ehInitiate.getEhToken()},
- ehCleanupBlock);
- return unwindBlock;
- }
-
- // Replace a cir.call with a cir.try_call that unwinds to the `unwindDest`
- // block if an exception is thrown.
- void replaceCallWithTryCall(cir::CallOp callOp, mlir::Block *unwindDest,
- mlir::Location loc,
- mlir::PatternRewriter &rewriter) const {
- mlir::Block *callBlock = callOp->getBlock();
-
- assert(!callOp.getNothrow() && "call is not expected to throw");
-
- // Split the block after the call - remaining ops become the normal
- // destination.
- mlir::Block *normalDest =
- rewriter.splitBlock(callBlock, std::next(callOp->getIterator()));
-
- // Build the try_call to replace the original call.
- rewriter.setInsertionPoint(callOp);
- mlir::Type resType = callOp->getNumResults() > 0
- ? callOp->getResult(0).getType()
- : mlir::Type();
- auto tryCallOp =
- cir::TryCallOp::create(rewriter, loc, callOp.getCalleeAttr(), resType,
- normalDest, unwindDest, callOp.getArgOperands());
-
- // Replace uses of the call result with the try_call result.
- if (callOp->getNumResults() > 0)
- callOp->getResult(0).replaceAllUsesWith(tryCallOp.getResult());
-
- rewriter.eraseOp(callOp);
- }
-
// Flatten a cleanup scope. The body region's exits branch to the cleanup
// block, and the cleanup block branches to destination blocks whose contents
// depend on the type of operation that exited the body region. Yield becomes
@@ -1180,8 +1169,8 @@ class CIRCleanupScopeOpFlattening
// need a shared unwind destination. Resume ops from inner cleanups
// branch directly to the EH cleanup entry.
if (!callsToRewrite.empty())
- unwindBlock =
- buildUnwindBlock(ehCleanupEntry, loc, ehCleanupEntry, rewriter);
+ unwindBlock = buildUnwindBlock(ehCleanupEntry, /*hasCleanup=*/true, loc,
+ ehCleanupEntry, rewriter);
}
// All normal flow blocks are inserted before this point — either before
@@ -1385,106 +1374,244 @@ class CIRTryOpFlattening : public mlir::OpRewritePattern<cir::TryOp> {
public:
using OpRewritePattern<cir::TryOp>::OpRewritePattern;
- mlir::Block *buildTryBody(cir::TryOp tryOp,
- mlir::PatternRewriter &rewriter) const {
- // Split the current block before the TryOp to create the inlining
- // point.
- mlir::Block *beforeTryScopeBlock = rewriter.getInsertionBlock();
- mlir::Block *afterTry =
- rewriter.splitBlock(beforeTryScopeBlock, rewriter.getInsertionPoint());
+ // Build the catch dispatch block with a cir.eh.dispatch operation.
+ // The dispatch block receives an !cir.eh_token argument and dispatches
+ // to the appropriate catch handler blocks based on exception types.
+ mlir::Block *buildCatchDispatchBlock(
+ cir::TryOp tryOp, mlir::ArrayAttr handlerTypes,
+ llvm::SmallVectorImpl<mlir::Block *> &catchHandlerBlocks,
+ mlir::Location loc, mlir::Block *insertBefore,
+ mlir::PatternRewriter &rewriter) const {
+ mlir::Block *dispatchBlock = rewriter.createBlock(insertBefore);
+ auto ehTokenType = cir::EhTokenType::get(rewriter.getContext());
+ mlir::Value ehToken = dispatchBlock->addArgument(ehTokenType, loc);
+
+ rewriter.setInsertionPointToEnd(dispatchBlock);
+
+ // Build the catch types and destinations for the dispatch.
+ llvm::SmallVector<mlir::Attribute> catchTypeAttrs;
+ llvm::SmallVector<mlir::Block *> catchDests;
+ mlir::Block *defaultDest = nullptr;
+ bool defaultIsCatchAll = false;
+
+ for (auto [typeAttr, handlerBlock] :
+ llvm::zip(handlerTypes, catchHandlerBlocks)) {
+ if (mlir::isa<cir::CatchAllAttr>(typeAttr)) {
+ assert(!defaultDest && "multiple catch_all or unwind handlers");
+ defaultDest = handlerBlock;
+ defaultIsCatchAll = true;
+ } else if (mlir::isa<cir::UnwindAttr>(typeAttr)) {
+ assert(!defaultDest && "multiple catch_all or unwind handlers");
+ defaultDest = handlerBlock;
+ defaultIsCatchAll = false;
+ } else {
+ // This is a typed catch handler (GlobalViewAttr with type info).
+ catchTypeAttrs.push_back(typeAttr);
+ catchDests.push_back(handlerBlock);
+ }
+ }
- // Inline body region.
- mlir::Block *beforeBody = &tryOp.getTryRegion().front();
- rewriter.inlineRegionBefore(tryOp.getTryRegion(), afterTry);
+ assert(defaultDest && "dispatch must have a catch_all or unwind handler");
- // Branch into the body of the region.
- rewriter.setInsertionPointToEnd(beforeTryScopeBlock);
- cir::BrOp::create(rewriter, tryOp.getLoc(), mlir::ValueRange(), beforeBody);
- return afterTry;
- }
+ mlir::ArrayAttr catchTypesArrayAttr;
+ if (!catchTypeAttrs.empty())
+ catchTypesArrayAttr = rewriter.getArrayAttr(catchTypeAttrs);
- void buildHandlers(cir::TryOp tryOp, mlir::PatternRewriter &rewriter,
- mlir::Block *afterBody, mlir::Block *afterTry,
- SmallVectorImpl<cir::CallOp> &callsToRewrite,
- SmallVectorImpl<mlir::Block *> &landingPads) const {
- // Replace the tryOp return with a branch that jumps out of the body.
- rewriter.setInsertionPointToEnd(afterBody);
+ cir::EhDispatchOp::create(rewriter, loc, ehToken, catchTypesArrayAttr,
+ defaultIsCatchAll, defaultDest, catchDests);
- mlir::Block *beforeCatch = rewriter.getInsertionBlock();
- rewriter.setInsertionPointToEnd(beforeCatch);
+ return dispatchBlock;
+ }
- // Check if the terminator is a YieldOp because there could be another
- // terminator, e.g. unreachable
- if (auto tryBodyYield = dyn_cast<cir::YieldOp>(afterBody->getTerminator()))
- rewriter.replaceOpWithNewOp<cir::BrOp>(tryBodyYield, afterTry);
+ // Flatten a single catch handler region. Each handler region has an
+ // !cir.eh_token argument and starts with cir.begin_catch, followed by
+ // a cir.cleanup.scope containing the handler body (with cir.end_catch in
+ // its cleanup region), and ending with cir.yield.
+ //
+ // After flattening, the handler region becomes a block that receives the
+ // eh_token, calls begin_catch, runs the handler body inline, calls
+ // end_catch, and branches to the continue block.
+ //
+ // The cleanup scope inside the catch handler is expected to have been
+ // flattened before we get here, so what we see in the handler region is
+ // already flat code with begin_catch at the top and end_catch in any place
+ // that we would exit the catch handler. We just need to inline the region
+ // and fix up terminators.
+ mlir::Block *flattenCatchHandler(mlir::Region &handlerRegion,
+ mlir::Block *continueBlock,
+ mlir::Location loc,
+ mlir::Block *insertBefore,
+ mlir::PatternRewriter &rewriter) const {
+ // The handler region entry block has the !cir.eh_token argument.
+ mlir::Block *handlerEntry = &handlerRegion.front();
+
+ // Inline the handler region before insertBefore.
+ rewriter.inlineRegionBefore(handlerRegion, insertBefore);
+
+ // Replace yield terminators in the handler with branches to continue.
+ for (mlir::Block &block : llvm::make_range(handlerEntry->getIterator(),
+ insertBefore->getIterator())) {
+ if (auto yieldOp = dyn_cast<cir::YieldOp>(block.getTerminator())) {
+ // Verify that end_catch is the last non-branch operation before
+ // this yield. After cleanup scope flattening, end_catch may be in
+ // a predecessor block rather than immediately before the yield.
+ // Walk back through the single-predecessor chain, verifying that
+ // each intermediate block contains only a branch terminator, until
+ // we find end_catch as the last non-terminator in some block.
+ assert([&]() {
+ // Check if end_catch immediately precedes the yield.
+ if (mlir::Operation *prev = yieldOp->getPrevNode())
+ return isa<cir::EndCatchOp>(prev);
+ // The yield is alone in its block. Walk backward through
+ // single-predecessor blocks that contain only a branch.
+ mlir::Block *b = block.getSinglePredecessor();
+ while (b) {
+ mlir::Operation *term = b->getTerminator();
+ if (mlir::Operation *prev = term->getPrevNode())
+ return isa<cir::EndCatchOp>(prev);
+ if (!isa<cir::BrOp>(term))
+ return false;
+ b = b->getSinglePredecessor();
+ }
+ return false;
+ }() && "expected end_catch as last operation before yield "
+ "in catch handler, with only branches in between");
+ rewriter.setInsertionPoint(yieldOp);
+ rewriter.replaceOpWithNewOp<cir::BrOp>(yieldOp, continueBlock);
+ }
+ }
- mlir::ArrayAttr handlers = tryOp.getHandlerTypesAttr();
- if (!handlers || handlers.empty())
- return;
+ return handlerEntry;
+ }
- llvm_unreachable("TryOpFlattening buildHandlers with CallsOp is NYI");
+ // Flatten an unwind handler region. The unwind region just contains a
+ // cir.resume that continues unwinding. We inline it and leave the resume
+ // in place. If this try op is nested inside an EH cleanup or another try op,
+ // the enclosing op will rewrite the resume as a branch to its cleanup or
+ // dispatch block when it is flattened. Otherwise, the resume will unwind to
+ // the caller.
+ mlir::Block *flattenUnwindHandler(mlir::Region &unwindRegion,
+ mlir::Location loc,
+ mlir::Block *insertBefore,
+ mlir::PatternRewriter &rewriter) const {
+ mlir::Block *unwindEntry = &unwindRegion.front();
+ rewriter.inlineRegionBefore(unwindRegion, insertBefore);
+ return unwindEntry;
}
mlir::LogicalResult
matchAndRewrite(cir::TryOp tryOp,
mlir::PatternRewriter &rewriter) const override {
- // Cleanup scopes must be lowered before the enclosing try so that
- // EH cleanup inside them is properly handled.
- // Fail the match so the pattern rewriter will process cleanup scopes first.
- bool hasNestedCleanup = tryOp
- ->walk([&](cir::CleanupScopeOp) {
- return mlir::WalkResult::interrupt();
- })
- .wasInterrupted();
- if (hasNestedCleanup)
+ // Nested try ops and cleanup scopes must be flattened before the enclosing
+ // try so that EH cleanup inside them is properly handled. Fail the match so
+ // the pattern rewriter will process nested ops first.
+ bool hasNestedOps =
+ tryOp
+ ->walk([&](mlir::Operation *op) {
+ if (isa<cir::CleanupScopeOp, cir::TryOp>(op) && op != tryOp)
+ return mlir::WalkResult::interrupt();
+ return mlir::WalkResult::advance();
+ })
+ .wasInterrupted();
+ if (hasNestedOps)
return mlir::failure();
- mlir::ArrayAttr handlers = tryOp.getHandlerTypesAttr();
- if (handlers && !handlers.empty())
- return tryOp->emitError(
- "TryOp flattening with handlers is not yet implemented");
-
mlir::OpBuilder::InsertionGuard guard(rewriter);
- mlir::Block *afterBody = &tryOp.getTryRegion().back();
-
- // Grab the collection of `cir.call exception`s to rewrite to
- // `cir.try_call`.
- llvm::SmallVector<cir::CallOp, 4> callsToRewrite;
- tryOp.getTryRegion().walk([&](CallOp op) {
- if (op.getNothrow())
- return;
-
- // Only grab calls within immediate closest TryOp scope.
- if (op->getParentOfType<cir::TryOp>() != tryOp)
- return;
- callsToRewrite.push_back(op);
- });
+ mlir::Location loc = tryOp.getLoc();
- if (!callsToRewrite.empty())
- llvm_unreachable(
- "TryOpFlattening with try block that contains CallOps is NYI");
+ mlir::ArrayAttr handlerTypes = tryOp.getHandlerTypesAttr();
+ mlir::MutableArrayRef<mlir::Region> handlerRegions =
+ tryOp.getHandlerRegions();
- // Build try body.
- mlir::Block *afterTry = buildTryBody(tryOp, rewriter);
+ // Collect throwing calls in the try body.
+ llvm::SmallVector<cir::CallOp> callsToRewrite;
+ collectThrowingCalls(tryOp.getTryRegion(), callsToRewrite);
- // Build handlers.
- llvm::SmallVector<mlir::Block *, 4> landingPads;
- buildHandlers(tryOp, rewriter, afterBody, afterTry, callsToRewrite,
- landingPads);
+ // Collect resume ops from already-flattened cleanup scopes in the try body.
+ llvm::SmallVector<cir::ResumeOp> resumeOpsToChain;
+ collectResumeOps(tryOp.getTryRegion(), resumeOpsToChain);
- rewriter.eraseOp(tryOp);
+ // Split the current block and inline the try body.
+ mlir::Block *currentBlock = rewriter.getInsertionBlock();
+ mlir::Block *continueBlock =
+ rewriter.splitBlock(currentBlock, rewriter.getInsertionPoint());
- assert((landingPads.size() == callsToRewrite.size()) &&
- "expected matching number of entries");
+ // Get references to try body blocks before inlining.
+ mlir::Block *bodyEntry = &tryOp.getTryRegion().front();
+ mlir::Block *bodyExit = &tryOp.getTryRegion().back();
- // Quick block cleanup: no indirection to the post try block.
- auto brOp = dyn_cast<cir::BrOp>(afterTry->getTerminator());
- if (brOp && brOp.getDest()->hasNoPredecessors()) {
- mlir::Block *srcBlock = brOp.getDest();
- rewriter.eraseOp(brOp);
- rewriter.mergeBlocks(srcBlock, afterTry);
+ // Inline the try body region before the continue block.
+ rewriter.inlineRegionBefore(tryOp.getTryRegion(), continueBlock);
+
+ // Branch from the current block to the body entry.
+ rewriter.setInsertionPointToEnd(currentBlock);
+ cir::BrOp::create(rewriter, loc, bodyEntry);
+
+ // Replace the try body's yield terminator with a branch to continue.
+ if (auto bodyYield = dyn_cast<cir::YieldOp>(bodyExit->getTerminator())) {
+ rewriter.setInsertionPoint(bodyYield);
+ rewriter.replaceOpWithNewOp<cir::BrOp>(bodyYield, continueBlock);
}
+ // If there are no handlers, we're done.
+ if (!handlerTypes || handlerTypes.empty()) {
+ rewriter.eraseOp(tryOp);
+ return mlir::success();
+ }
+
+ // Build the catch handler blocks.
+
+ // First, flatten all handler regions and collect the entry blocks.
+ llvm::SmallVector<mlir::Block *> catchHandlerBlocks;
+
+ for (const auto &[idx, typeAttr] : llvm::enumerate(handlerTypes)) {
+ mlir::Region &handlerRegion = handlerRegions[idx];
+
+ if (mlir::isa<cir::UnwindAttr>(typeAttr)) {
+ mlir::Block *unwindEntry =
+ flattenUnwindHandler(handlerRegion, loc, continueBlock, rewriter);
+ catchHandlerBlocks.push_back(unwindEntry);
+ } else {
+ mlir::Block *handlerEntry = flattenCatchHandler(
+ handlerRegion, continueBlock, loc, continueBlock, rewriter);
+ catchHandlerBlocks.push_back(handlerEntry);
+ }
+ }
+
+ // Build the catch dispatch block.
+ mlir::Block *dispatchBlock =
+ buildCatchDispatchBlock(tryOp, handlerTypes, catchHandlerBlocks, loc,
+ catchHandlerBlocks.front(), rewriter);
+
+ // Build a block to be the unwind destination for throwing calls and replace
+ // the calls with try_call ops. Note that the unwind block created here is
+ // something different than the unwind handler that we may have created
+ // above. The unwind handler continues unwinding after uncaught exceptions.
+ // This is the block that will eventually become the landing pad for invoke
+ // instructions.
+ bool hasCleanup = tryOp.getCleanup();
+ if (!callsToRewrite.empty()) {
+ // Create a shared unwind block for all throwing calls.
+ mlir::Block *unwindBlock = buildUnwindBlock(dispatchBlock, hasCleanup,
+ loc, dispatchBlock, rewriter);
+
+ for (cir::CallOp callOp : callsToRewrite)
+ replaceCallWithTryCall(callOp, unwindBlock, loc, rewriter);
+ }
+
+ // Chain resume ops from inner cleanup scopes.
+ // Resume ops from already-flattened cleanup scopes within the try body
+ // should branch to the catch dispatch block instead of unwinding directly.
+ for (cir::ResumeOp resumeOp : resumeOpsToChain) {
+ mlir::Value ehToken = resumeOp.getEhToken();
+ rewriter.setInsertionPoint(resumeOp);
+ rewriter.replaceOpWithNewOp<cir::BrOp>(
+ resumeOp, mlir::ValueRange{ehToken}, dispatchBlock);
+ }
+
+ // Finally, erase the original try op ----
+ rewriter.eraseOp(tryOp);
+
return mlir::success();
}
};
diff --git a/clang/test/CIR/Transforms/flatten-cleanup-scope-nyi.cir b/clang/test/CIR/Transforms/flatten-cleanup-scope-nyi.cir
index 694a17f1568ef..63ed793b9cfd9 100644
--- a/clang/test/CIR/Transforms/flatten-cleanup-scope-nyi.cir
+++ b/clang/test/CIR/Transforms/flatten-cleanup-scope-nyi.cir
@@ -43,67 +43,6 @@ cir.func @test_all_cleanup_in_try() {
cir.return
}
-// Test that we issue a diagnostic for an EH cleanup nested in a try with a
-// catch all handlers.
-cir.func @test_eh_cleanup_in_try_catchall() {
- %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
- cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- // expected-error @below {{TryOp flattening with handlers is not yet implemented}}
- cir.try {
- cir.cleanup.scope {
- cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- } cleanup all {
- cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- }
- cir.yield
- } catch all (%eh_token : !cir.eh_token) {
- %catch_token, %1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
- cir.cleanup.scope {
- cir.yield
- } cleanup eh {
- cir.end_catch %catch_token : !cir.catch_token
- cir.yield
- }
- cir.yield
- }
- cir.return
-}
-
-// Test that we issue a diagnostic for an EH cleanup nested in a try with a
-// catch and unwind handlers.
-cir.func @test_eh_cleanup_in_try_catch_unwind() {
- %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
- %1 = cir.alloca !s32i, !cir.ptr<!s32i>, ["e"] {alignment = 4 : i64}
- cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- // expected-error @below {{TryOp flattening with handlers is not yet implemented}}
- cir.try {
- cir.cleanup.scope {
- cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- } cleanup all {
- cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- }
- cir.yield
- } catch [type #cir.global_view<@_ZTIi> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
- %catch_token, %2 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
- cir.cleanup.scope {
- %3 = cir.load align(4) %2 : !cir.ptr<!s32i>, !s32i
- cir.store align(4) %3, %1 : !s32i, !cir.ptr<!s32i>
- cir.yield
- } cleanup eh {
- cir.end_catch %catch_token : !cir.catch_token
- cir.yield
- }
- cir.yield
- } unwind (%eh_token_1 : !cir.eh_token) {
- cir.resume %eh_token_1 : !cir.eh_token
- }
- cir.return
-}
-
// Test that we issue a diagnostic for throwing calls in the cleanup region
// of a nested EH cleanup scope (the dtor is not nothrow).
cir.func @test_nested_eh_cleanup() {
@@ -205,35 +144,6 @@ cir.func @test_goto_in_nested_cleanup() {
cir.return
}
-// Test that a try op with handlers nested inside a cleanup scope produces
-// a diagnostic. The cleanup scope defers to the try op, which then fails
-// because handler flattening is not yet implemented.
-cir.func @test_try_with_handlers_in_cleanup() {
- %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
- cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.cleanup.scope {
- // expected-error @below {{TryOp flattening with handlers is not yet implemented}}
- cir.try {
- cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- } catch all (%eh_token : !cir.eh_token) {
- %catch_token, %1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
- cir.cleanup.scope {
- cir.yield
- } cleanup eh {
- cir.end_catch %catch_token : !cir.catch_token
- cir.yield
- }
- cir.yield
- }
- cir.yield
- } cleanup all {
- cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
- cir.yield
- }
- cir.return
-}
-
// Test that we issue a diagnostic for throwing calls in the cleanup region
// of an EH cleanup scope.
cir.func @test_throwing_call_in_eh_cleanup() {
diff --git a/clang/test/CIR/Transforms/flatten-try-op.cir b/clang/test/CIR/Transforms/flatten-try-op.cir
new file mode 100644
index 0000000000000..d761b602f1b12
--- /dev/null
+++ b/clang/test/CIR/Transforms/flatten-try-op.cir
@@ -0,0 +1,737 @@
+// RUN: cir-opt %s -cir-flatten-cfg -o %t.cir
+// RUN: FileCheck --input-file=%t.cir %s
+
+!s32i = !cir.int<s, 32>
+!u8i = !cir.int<u, 8>
+!void = !cir.void
+!rec_SomeClass = !cir.record<struct "SomeClass" {!s32i}>
+!rec_exception = !cir.record<struct "std::exception" {!s32i}>
+
+// Test a simple try with no handlers and no throwing calls.
+cir.func @test_try_no_handlers() {
+ %0 = cir.alloca !s32i, !cir.ptr<!s32i>, ["a", init] {alignment = 4 : i64}
+ cir.scope {
+ cir.try {
+ %1 = cir.const #cir.int<1> : !s32i
+ cir.store %1, %0 : !s32i, !cir.ptr<!s32i>
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_no_handlers()
+// CHECK: %[[ALLOCA:.*]] = cir.alloca !s32i
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: %[[C1:.*]] = cir.const #cir.int<1> : !s32i
+// CHECK: cir.store %[[C1]], %[[ALLOCA]]
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^[[SCOPE_EXIT:bb[0-9]+]]
+// CHECK: ^[[SCOPE_EXIT]]:
+// CHECK: cir.return
+
+// Test try-catch with catch all, throwing call in try body.
+// The throwing call becomes try_call, and we get an unwind block,
+// catch dispatch, and a catch-all handler.
+cir.func @test_try_catch_all() {
+ cir.scope {
+ cir.try {
+ cir.call @mayThrow() : () -> ()
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %catch_token, %exn_ptr = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_catch_all()
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @mayThrow() ^[[NORMAL:bb[0-9]+]], ^[[UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[UNWIND]]:
+// CHECK: %[[EH_TOK:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_TOK]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_ALL]](%[[ET:.*]]: !cir.eh_token):
+// CHECK: %[[CT:.*]], %[[EXN:.*]] = cir.begin_catch %[[ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch %[[CT]] : !cir.catch_token
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^[[SCOPE_EXIT:bb[0-9]+]]
+// CHECK: ^[[SCOPE_EXIT]]:
+// CHECK: cir.return
+
+// Test try-catch with typed handler and unwind.
+// If the exception type doesn't match, control goes to the unwind handler
+// which resumes unwinding.
+cir.func @test_try_catch_typed_with_unwind() {
+ %0 = cir.alloca !cir.ptr<!rec_exception>, !cir.ptr<!cir.ptr<!rec_exception>>, ["e"] {alignment = 8 : i64}
+ cir.scope {
+ cir.try {
+ cir.call @mayThrow() : () -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %catch_token, %1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.ptr<!rec_exception>>)
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ } unwind (%eh_token : !cir.eh_token) {
+ cir.resume %eh_token : !cir.eh_token
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_catch_typed_with_unwind()
+// CHECK: %[[E_ALLOCA:.*]] = cir.alloca !cir.ptr<!rec_std3A3Aexception>
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @mayThrow() ^[[NORMAL:bb[0-9]+]], ^[[UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[UNWIND]]:
+// CHECK: %[[EH_TOK:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_TOK]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>) : ^[[CATCH_TYPED:bb[0-9]+]],
+// CHECK: unwind : ^[[UNWIND_HANDLER:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_TYPED]](%[[CT_ET:.*]]: !cir.eh_token):
+// CHECK: %[[CT:.*]], %[[EXN:.*]] = cir.begin_catch %[[CT_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.ptr<!rec_std3A3Aexception>>)
+// CHECK: cir.end_catch %[[CT]] : !cir.catch_token
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[UNWIND_HANDLER]](%[[UW_ET:.*]]: !cir.eh_token):
+// CHECK: cir.resume %[[UW_ET]] : !cir.eh_token
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^{{.*}}
+// CHECK: cir.return
+
+// Test try-catch with cleanup inside the try body.
+// The cleanup scope is flattened first. The inner EH cleanup resume ops
+// are chained to the catch dispatch block of the enclosing try.
+cir.func @test_try_catch_with_cleanup() {
+ cir.scope {
+ %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
+ cir.try {
+ cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.cleanup.scope {
+ cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ } cleanup all {
+ cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ }
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %catch_token, %1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_catch_with_cleanup()
+// CHECK: %[[C:.*]] = cir.alloca !rec_SomeClass
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @ctor(%[[C]]) ^[[AFTER_CTOR:bb[0-9]+]], ^[[OUTER_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[AFTER_CTOR]]:
+// CHECK: cir.br ^[[CLEANUP_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_BODY]]:
+// CHECK: cir.try_call @doSomething(%[[C]]) ^[[AFTER_DO:bb[0-9]+]], ^[[INNER_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[AFTER_DO]]:
+// CHECK: cir.br ^[[NORMAL_CLEANUP:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL_CLEANUP]]:
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.br ^[[CLEANUP_EXIT:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_EXIT]]:
+// CHECK: cir.br ^[[TRY_EXIT:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_UNWIND]]:
+// CHECK: %[[INNER_EH:.*]] = cir.eh.initiate cleanup : !cir.eh_token
+// CHECK: cir.br ^[[EH_CLEANUP:bb[0-9]+]](%[[INNER_EH]] : !cir.eh_token)
+//
+// CHECK: ^[[EH_CLEANUP]](%[[EH_CT:.*]]: !cir.eh_token):
+// CHECK: %[[CT:.*]] = cir.begin_cleanup %[[EH_CT]] : !cir.eh_token -> !cir.cleanup_token
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.end_cleanup %[[CT]] : !cir.cleanup_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_CT]] : !cir.eh_token)
+//
+// CHECK: ^[[TRY_EXIT]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[OUTER_UNWIND]]:
+// CHECK: %[[CTOR_EH:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH]](%[[CTOR_EH]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_ALL]](%[[CA_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[CA_ET]]
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^{{.*}}
+// CHECK: cir.return
+
+// Test try-catch within a cleanup scope.
+// The try is nested inside a cleanup scope. If an exception is not caught
+// by the try's handlers, the resume from the unwind handler should chain
+// to the outer cleanup's EH handler.
+cir.func @test_try_in_cleanup() {
+ // The try — with a typed catch keyed on @_ZTISt9exception and an unwind
+ // region that resumes — sits inside a cleanup scope whose cleanup region
+ // calls the nothrow @dtor.
+ %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
+ %1 = cir.alloca !cir.ptr<!rec_exception>, !cir.ptr<!cir.ptr<!rec_exception>>, ["e"] {alignment = 8 : i64}
+ cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.cleanup.scope {
+ cir.scope {
+ cir.try {
+ cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %catch_token, %2 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.ptr<!rec_exception>>)
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ } unwind (%eh_token : !cir.eh_token) {
+ cir.resume %eh_token : !cir.eh_token
+ }
+ }
+ cir.yield
+ } cleanup all {
+ cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_in_cleanup()
+// CHECK: %[[C:.*]] = cir.alloca !rec_SomeClass
+// CHECK: %[[E:.*]] = cir.alloca !cir.ptr<!rec_std3A3Aexception>
+// CHECK: cir.call @ctor(%[[C]])
+// CHECK: cir.br ^[[OUTER_CLEANUP_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[OUTER_CLEANUP_BODY]]:
+// CHECK: cir.br ^[[SCOPE_ENTER:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE_ENTER]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @doSomething(%[[C]]) ^[[NORMAL:bb[0-9]+]], ^[[TRY_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[TRY_CONT:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_UNWIND]]:
+// CHECK: %[[TRY_EH:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[TRY_EH]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>) : ^[[CATCH:bb[0-9]+]],
+// CHECK: unwind : ^[[UNWIND_HANDLER:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH]](%[[CATCH_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[CATCH_ET]]
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[TRY_CONT]]
+//
+// CHECK: ^[[UNWIND_HANDLER]](%[[UW_ET:.*]]: !cir.eh_token):
+// CHECK: cir.br ^[[OUTER_EH_CLEANUP:bb[0-9]+]](%[[UW_ET]] : !cir.eh_token)
+//
+// CHECK: ^[[TRY_CONT]]:
+// CHECK: cir.br ^[[SCOPE_EXIT:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE_EXIT]]:
+// CHECK: cir.br ^[[NORMAL_CLEANUP:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL_CLEANUP]]:
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.br ^[[CLEANUP_EXIT:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_EXIT]]:
+// CHECK: cir.br ^[[RETURN:bb[0-9]+]]
+//
+// CHECK: ^[[OUTER_EH_CLEANUP]](%[[OEH_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}} = cir.begin_cleanup %[[OEH_ET]]
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.end_cleanup
+// CHECK: cir.resume %[[OEH_ET]] : !cir.eh_token
+//
+// CHECK: ^[[RETURN]]:
+// CHECK: cir.return
+
+// Test try with catch all and no throwing calls.
+// The try body doesn't have throwing calls, so no try_call/unwind blocks are
+// needed. The handlers are still built but won't be reachable.
+cir.func @test_try_catch_all_no_throwing_calls() {
+ // The try body contains only a constant — no calls at all — so there is
+ // nothing to rewrite into try_call; the catch-all handler is still built.
+ cir.scope {
+ cir.try {
+ %0 = cir.const #cir.int<42> : !s32i
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %catch_token, %exn_ptr = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_catch_all_no_throwing_calls()
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: %{{.*}} = cir.const #cir.int<42> : !s32i
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// Catch dispatch (unreachable since there are no throwing calls).
+// CHECK: ^[[DISPATCH:bb[0-9]+]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// Catch-all handler (unreachable).
+// CHECK: ^[[CATCH_ALL]](%[[ET:.*]]: !cir.eh_token):
+// CHECK: %[[CT:.*]], %{{.*}} = cir.begin_catch %[[ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch %[[CT]] : !cir.catch_token
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^[[SCOPE_EXIT:bb[0-9]+]]
+// CHECK: ^[[SCOPE_EXIT]]:
+// CHECK: cir.return
+
+// Test try-catch with a typed handler (for @_ZTIi) and a catch-all.
+cir.func @test_try_multiple_handlers() {
+ // @mayThrow is the single potentially-throwing call; the try carries one
+ // typed handler keyed on @_ZTIi plus a catch-all handler.
+ cir.scope {
+ cir.try {
+ cir.call @mayThrow() : () -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTIi> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %ct1, %exn1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+ cir.end_catch %ct1 : !cir.catch_token
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %ct2, %exn2 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %ct2 : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_multiple_handlers()
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @mayThrow() ^[[NORMAL:bb[0-9]+]], ^[[UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[UNWIND]]:
+// CHECK: %[[EH_TOK:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_TOK]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTIi> : !cir.ptr<!u8i>) : ^[[CATCH_INT:bb[0-9]+]],
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_INT]](%[[INT_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[INT_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CATCH_ALL]](%[[ALL_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[ALL_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^{{.*}}
+// CHECK: cir.return
+
+// Test nested try ops.
+// The inner try is flattened first. Its unwind handler's resume is then
+// chained to the outer try's catch dispatch when the outer try is flattened.
+cir.func @test_nested_try() {
+ // Inner try: typed handler keyed on @_ZTIi plus an unwind region that
+ // resumes. Outer try: catch-all only.
+ cir.scope {
+ cir.try {
+ cir.try {
+ cir.call @mayThrow() : () -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTIi> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %ct, %exn = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+ cir.end_catch %ct : !cir.catch_token
+ cir.yield
+ } unwind (%eh_token : !cir.eh_token) {
+ cir.resume %eh_token : !cir.eh_token
+ }
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %ct, %exn = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %ct : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_nested_try()
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[OUTER_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[OUTER_BODY]]:
+// CHECK: cir.br ^[[INNER_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_BODY]]:
+// CHECK: cir.try_call @mayThrow() ^[[INNER_NORMAL:bb[0-9]+]], ^[[INNER_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_NORMAL]]:
+// CHECK: cir.br ^[[INNER_CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_UNWIND]]:
+// CHECK: %[[IEH:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[INNER_DISPATCH:bb[0-9]+]](%[[IEH]] : !cir.eh_token)
+//
+// CHECK: ^[[INNER_DISPATCH]](%[[ID_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[ID_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTIi> : !cir.ptr<!u8i>) : ^[[INNER_CATCH:bb[0-9]+]],
+// CHECK: unwind : ^[[INNER_UW_HANDLER:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[INNER_CATCH]](%[[IC_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[IC_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[INNER_CONTINUE]]
+//
+// CHECK: ^[[INNER_UW_HANDLER]](%[[IUW_ET:.*]]: !cir.eh_token):
+// CHECK: cir.br ^[[OUTER_DISPATCH:bb[0-9]+]](%[[IUW_ET]] : !cir.eh_token)
+//
+// CHECK: ^[[INNER_CONTINUE]]:
+// CHECK: cir.br ^[[OUTER_CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[OUTER_DISPATCH]](%[[OD_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[OD_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[OUTER_CATCH:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[OUTER_CATCH]](%[[OC_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[OC_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[OUTER_CONTINUE]]
+//
+// CHECK: ^[[OUTER_CONTINUE]]:
+// CHECK: cir.br ^{{.*}}
+// CHECK: cir.return
+
+// Test a try op (with handlers) nested inside a cleanup scope, where the
+// catch handler itself contains a cleanup scope that issues end_catch from
+// its cleanup region. This is the form of the catch handler that will
+// actually be generated; the representations above are all simplifications.
+cir.func @test_try_with_handlers_in_cleanup() {
+ // The try is wrapped in a cleanup scope whose cleanup calls the nothrow
+ // @dtor. Inside the catch-all handler, end_catch is issued from the
+ // cleanup region of a further (inner) cleanup scope.
+ %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
+ cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.cleanup.scope {
+ cir.try {
+ cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %catch_token, %1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+ cir.cleanup.scope {
+ cir.yield
+ } cleanup all {
+ cir.end_catch %catch_token : !cir.catch_token
+ cir.yield
+ }
+ cir.yield
+ }
+ cir.yield
+ } cleanup all {
+ cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_with_handlers_in_cleanup()
+// CHECK: %[[C:.*]] = cir.alloca !rec_SomeClass
+// CHECK: cir.call @ctor(%[[C]])
+// CHECK: cir.br ^[[CLEANUP_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_BODY]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @doSomething(%[[C]]) ^[[NORMAL:bb[0-9]+]], ^[[UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[TRY_CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[UNWIND]]:
+// CHECK: %[[EH_TOK:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_TOK]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_ALL]](%[[ET:.*]]: !cir.eh_token):
+// CHECK: %[[CATCH_TOK:.*]], %{{.*}} = cir.begin_catch %[[ET]]
+// CHECK: cir.br ^[[INNER_CLEANUP_BODY:bb[0-9]+]]
+// CHECK: ^[[INNER_CLEANUP_BODY]]:
+// CHECK: cir.br ^[[NORMAL_CLEANUP:bb[0-9]+]]
+// CHECK: ^[[NORMAL_CLEANUP]]:
+// CHECK: cir.end_catch %[[CATCH_TOK]]
+// CHECK: cir.br ^[[CLEANUP_EXIT:bb[0-9]+]]
+// CHECK: ^[[CLEANUP_EXIT]]:
+// CHECK: cir.br ^[[CATCH_EXIT:bb[0-9]+]]
+// CHECK: ^[[CATCH_EXIT]]:
+// CHECK: cir.br ^[[TRY_CONTINUE]]
+//
+// CHECK: ^[[TRY_CONTINUE]]:
+// CHECK: cir.br ^{{.*}}
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+//
+// CHECK: cir.return
+
+// Test nested try ops with a cleanup scope in the outer try body.
+// The inner try's unwind handler must execute the cleanup (dtor) before
+// the exception reaches the outer try's catch dispatch.
+cir.func @test_nested_try_with_cleanup() {
+ // Outer try: @ctor call, then a cleanup scope (nothrow @dtor) wrapping an
+ // inner try that has a typed handler keyed on @_ZTIi and a resuming unwind
+ // region. The outer try has a catch-all handler.
+ %0 = cir.alloca !rec_SomeClass, !cir.ptr<!rec_SomeClass>, ["c", init] {alignment = 4 : i64}
+ cir.scope {
+ cir.try {
+ cir.call @ctor(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.cleanup.scope {
+ cir.try {
+ cir.call @doSomething(%0) : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTIi> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %ct, %exn = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+ cir.end_catch %ct : !cir.catch_token
+ cir.yield
+ } unwind (%eh_token : !cir.eh_token) {
+ cir.resume %eh_token : !cir.eh_token
+ }
+ cir.yield
+ } cleanup all {
+ cir.call @dtor(%0) nothrow : (!cir.ptr<!rec_SomeClass>) -> ()
+ cir.yield
+ }
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %ct, %exn = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %ct : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_nested_try_with_cleanup()
+// CHECK: %[[C:.*]] = cir.alloca !rec_SomeClass
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @ctor(%[[C]]) ^[[AFTER_CTOR:bb[0-9]+]], ^[[CTOR_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[AFTER_CTOR]]:
+// CHECK: cir.br ^[[CLEANUP_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_BODY]]:
+// CHECK: cir.br ^[[INNER_TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_TRY_BODY]]:
+// CHECK: cir.try_call @doSomething(%[[C]]) ^[[INNER_NORMAL:bb[0-9]+]], ^[[INNER_UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_NORMAL]]:
+// CHECK: cir.br ^[[INNER_CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[INNER_UNWIND]]:
+// CHECK: %[[IEH:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[INNER_DISPATCH:bb[0-9]+]](%[[IEH]] : !cir.eh_token)
+//
+// CHECK: ^[[INNER_DISPATCH]](%[[ID_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[ID_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTIi> : !cir.ptr<!u8i>) : ^[[INNER_CATCH:bb[0-9]+]],
+// CHECK: unwind : ^[[INNER_UW_HANDLER:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[INNER_CATCH]](%[[IC_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[IC_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[INNER_CONTINUE]]
+//
+// The inner unwind handler goes through the EH cleanup (dtor) before
+// reaching the outer try's catch dispatch.
+// CHECK: ^[[INNER_UW_HANDLER]](%[[IUW_ET:.*]]: !cir.eh_token):
+// CHECK: cir.br ^[[EH_CLEANUP:bb[0-9]+]](%[[IUW_ET]] : !cir.eh_token)
+//
+// The continuation path after the inner try catches an exception branches to
+// the normal outer cleanup.
+// CHECK: ^[[INNER_CONTINUE]]:
+// CHECK: cir.br ^[[NORMAL_CLEANUP:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL_CLEANUP]]:
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.br ^[[CLEANUP_EXIT:bb[0-9]+]]
+//
+// CHECK: ^[[CLEANUP_EXIT]]:
+// CHECK: cir.br ^[[TRY_EXIT:bb[0-9]+]]
+//
+// EH cleanup: the cleanup (dtor) runs before unwinding to the outer dispatch.
+// CHECK: ^[[EH_CLEANUP]](%[[EH_ET:.*]]: !cir.eh_token):
+// CHECK: %[[CT:.*]] = cir.begin_cleanup %[[EH_ET]] : !cir.eh_token -> !cir.cleanup_token
+// CHECK: cir.call @dtor(%[[C]]) nothrow
+// CHECK: cir.end_cleanup %[[CT]] : !cir.cleanup_token
+// CHECK: cir.br ^[[OUTER_DISPATCH:bb[0-9]+]](%[[EH_ET]] : !cir.eh_token)
+//
+// CHECK: ^[[TRY_EXIT]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// Ctor's unwind goes directly to outer dispatch (cleanup scope not yet entered).
+// CHECK: ^[[CTOR_UNWIND]]:
+// CHECK: %[[CTOR_EH:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[OUTER_DISPATCH]](%[[CTOR_EH]] : !cir.eh_token)
+//
+// CHECK: ^[[OUTER_DISPATCH]](%[[OD_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[OD_ET]] : !cir.eh_token [
+// CHECK: catch_all : ^[[OUTER_CATCH:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[OUTER_CATCH]](%[[OC_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[OC_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^[[RETURN:bb[0-9]+]]
+// CHECK: ^[[RETURN]]:
+// CHECK: cir.return
+
+// Test try-catch with multiple typed handlers and a catch-all.
+cir.func @test_try_multiple_typed_and_catch_all() {
+ // Two typed handlers (keyed on @_ZTISt9exception, then @_ZTIi) followed by
+ // a catch-all; the handlers' source order is preserved in the dispatch.
+ cir.scope {
+ cir.try {
+ cir.call @mayThrow() : () -> ()
+ cir.yield
+ } catch [type #cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %ct1, %exn1 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.ptr<!rec_exception>>)
+ cir.end_catch %ct1 : !cir.catch_token
+ cir.yield
+ } catch [type #cir.global_view<@_ZTIi> : !cir.ptr<!u8i>] (%eh_token : !cir.eh_token) {
+ %ct2, %exn2 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+ cir.end_catch %ct2 : !cir.catch_token
+ cir.yield
+ } catch all (%eh_token : !cir.eh_token) {
+ %ct3, %exn3 = cir.begin_catch %eh_token : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.void>)
+ cir.end_catch %ct3 : !cir.catch_token
+ cir.yield
+ }
+ }
+ cir.return
+}
+
+// CHECK-LABEL: cir.func @test_try_multiple_typed_and_catch_all()
+// CHECK: cir.br ^[[SCOPE:bb[0-9]+]]
+//
+// CHECK: ^[[SCOPE]]:
+// CHECK: cir.br ^[[TRY_BODY:bb[0-9]+]]
+//
+// CHECK: ^[[TRY_BODY]]:
+// CHECK: cir.try_call @mayThrow() ^[[NORMAL:bb[0-9]+]], ^[[UNWIND:bb[0-9]+]]
+//
+// CHECK: ^[[NORMAL]]:
+// CHECK: cir.br ^[[CONTINUE:bb[0-9]+]]
+//
+// CHECK: ^[[UNWIND]]:
+// CHECK: %[[EH_TOK:.*]] = cir.eh.initiate : !cir.eh_token
+// CHECK: cir.br ^[[DISPATCH:bb[0-9]+]](%[[EH_TOK]] : !cir.eh_token)
+//
+// CHECK: ^[[DISPATCH]](%[[DISP_ET:.*]]: !cir.eh_token):
+// CHECK: cir.eh.dispatch %[[DISP_ET]] : !cir.eh_token [
+// CHECK: catch(#cir.global_view<@_ZTISt9exception> : !cir.ptr<!u8i>) : ^[[CATCH_EXN:bb[0-9]+]],
+// CHECK: catch(#cir.global_view<@_ZTIi> : !cir.ptr<!u8i>) : ^[[CATCH_INT:bb[0-9]+]],
+// CHECK: catch_all : ^[[CATCH_ALL:bb[0-9]+]]
+// CHECK: ]
+//
+// CHECK: ^[[CATCH_EXN]](%[[EXN_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[EXN_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!cir.ptr<!rec_std3A3Aexception>>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CATCH_INT]](%[[INT_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[INT_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!s32i>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CATCH_ALL]](%[[ALL_ET:.*]]: !cir.eh_token):
+// CHECK: %{{.*}}, %{{.*}} = cir.begin_catch %[[ALL_ET]] : !cir.eh_token -> (!cir.catch_token, !cir.ptr<!void>)
+// CHECK: cir.end_catch
+// CHECK: cir.br ^[[CONTINUE]]
+//
+// CHECK: ^[[CONTINUE]]:
+// CHECK: cir.br ^[[RETURN:bb[0-9]+]]
+// CHECK: ^[[RETURN]]:
+// CHECK: cir.return
+
+// Callee declarations and typeinfo globals referenced by the tests in this
+// file. @dtor is declared nothrow, matching its nothrow call sites.
+cir.func private @mayThrow()
+cir.func private @ctor(!cir.ptr<!rec_SomeClass>)
+cir.func private @dtor(!cir.ptr<!rec_SomeClass>) attributes {nothrow}
+cir.func private @doSomething(!cir.ptr<!rec_SomeClass>)
+cir.global "private" constant external @_ZTISt9exception : !cir.ptr<!u8i>
+cir.global "private" constant external @_ZTIi : !cir.ptr<!u8i>
More information about the cfe-commits
mailing list