[llvm] [IsolatePath] Add a pass to isolate paths with undefined behavior (PR #146791)

Bill Wendling via llvm-commits llvm-commits at lists.llvm.org
Wed Jul 2 14:54:37 PDT 2025


https://github.com/bwendling created https://github.com/llvm/llvm-project/pull/146791

This pass isolates code paths with undefined behavior. This allows
us to replace the undefined behavior with an explicit 'trap'
instruction while allowing non-UB code to remain.

This first patch implements accessing a 'nullptr'. It analyses PHI
nodes that return a 'ptr' type and which have constant 'null' as an
incoming value. It then searches that basic block for any uses of
the PHI node in a 'load' or 'store' instruction. If a 'load' or 'store'
is found, a 'trap' instruction is generated instead.

For example, we convert this code:

    foo:
      %phi.val = phi ptr [ %arrayidx.i, %pred1 ], [ null, %pred2 ]
      %load.val = load i32, ptr %phi.val, align 4
       ...

into:

    foo:
      %load.val = load i32, ptr %phi.val, align 4
       ...

    foo.ub.path:
      %load.val = load i32, ptr null, align 4
      tail call void @llvm.trap()
      unreachable

Note: we allow the NULL dereference to actually occur so that code that
wishes to catch the signal can do so.

From 0db45878fc6105d01172495a8267eba407deaf64 Mon Sep 17 00:00:00 2001
From: Bill Wendling <morbo at google.com>
Date: Tue, 6 May 2025 17:01:39 -0700
Subject: [PATCH] [IsolatePath] Add a pass to isolate paths with undefined
 behavior

This pass isolates code paths with undefined behavior. This allows us to
replace the undefined behavior with an explicit 'trap' instruction while
allowing non-UB code to remain.

This first patch implements accessing a 'nullptr'. It analyses PHI nodes
that return a 'ptr' type and which have constant 'null' as an incoming
value. It then searches that basic block for any uses of the PHI node in
a 'load' or 'store' instruction. If a 'load' or 'store' is found, a
'trap' instruction is generated instead.

For example, we convert this code:

    foo:
      %phi.val = phi ptr [ %arrayidx.i, %pred1 ], [ null, %pred2 ]
      %load.val = load i32, ptr %phi.val, align 4
       ...

into:

    foo:
      %load.val = load i32, ptr %phi.val, align 4
       ...

    foo.ub.path:
      tail call void @llvm.trap()
      unreachable
---
 .../llvm/Transforms/Scalar/IsolatePath.h      |  58 +++
 llvm/lib/Passes/PassBuilder.cpp               |   1 +
 llvm/lib/Passes/PassBuilderPipelines.cpp      |   5 +
 llvm/lib/Passes/PassRegistry.def              |   1 +
 llvm/lib/Transforms/Scalar/CMakeLists.txt     |   1 +
 llvm/lib/Transforms/Scalar/IsolatePath.cpp    | 300 ++++++++++++++
 llvm/test/Other/new-pm-defaults.ll            |   3 +-
 .../Other/new-pm-thinlto-postlink-defaults.ll |   3 +-
 .../new-pm-thinlto-postlink-pgo-defaults.ll   |   1 +
 ...-pm-thinlto-postlink-samplepgo-defaults.ll |   1 +
 .../Other/new-pm-thinlto-prelink-defaults.ll  |   3 +-
 .../new-pm-thinlto-prelink-pgo-defaults.ll    |   4 +-
 ...w-pm-thinlto-prelink-samplepgo-defaults.ll |   1 +
 .../IsolatePath/ub-memory-accesses.ll         | 370 ++++++++++++++++++
 14 files changed, 748 insertions(+), 4 deletions(-)
 create mode 100644 llvm/include/llvm/Transforms/Scalar/IsolatePath.h
 create mode 100644 llvm/lib/Transforms/Scalar/IsolatePath.cpp
 create mode 100644 llvm/test/Transforms/IsolatePath/ub-memory-accesses.ll

diff --git a/llvm/include/llvm/Transforms/Scalar/IsolatePath.h b/llvm/include/llvm/Transforms/Scalar/IsolatePath.h
new file mode 100644
index 0000000000000..fce4c4ceaa27b
--- /dev/null
+++ b/llvm/include/llvm/Transforms/Scalar/IsolatePath.h
@@ -0,0 +1,58 @@
+//===- IsolatePath.cpp - Code to isolate paths with UB ----------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This pass isolates code paths with undefined behavior from paths without
+// undefined behavior, and then adds a trap instruction on that path. This
+// prevents code generation where, after the UB instruction is eliminated, the
+// code can wander off the end of a function.
+//
+// For example, a nullptr dereference:
+//
+//   foo:
+//     %phi.val = phi ptr [ %arrayidx.i, %pred1 ], [ null, %pred2 ]
+//     %load.val = load i32, ptr %phi.val, align 4
+//
+// is converted into:
+//
+//   foo.ub.path:
+//     %load.val.ub = load volatile i32, ptr null, align 4
+//     tail call void @llvm.trap()
+//     unreachable
+//
+// Note: we allow the NULL dereference to actually occur so that code that
+// wishes to catch the signal can do so.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_TRANSFORMS_SCALAR_ISOLATEPATH_H
+#define LLVM_TRANSFORMS_SCALAR_ISOLATEPATH_H
+
+#include "llvm/ADT/SmallPtrSet.h"
+#include "llvm/IR/PassManager.h"
+
+namespace llvm {
+
+class BasicBlock;
+class DomTreeUpdater;
+class Function;
+
+/// This pass performs 'path isolation', which looks for undefined behavior and
+/// isolates the path from non-undefined behavior code and converts the UB into
+/// a trap call.
+class IsolatePathPass : public PassInfoMixin<IsolatePathPass> {
+  SmallPtrSet<BasicBlock *, 4> SplitUBBlocks;
+
+  bool ProcessPointerUndefinedBehavior(BasicBlock *BB, DomTreeUpdater *DTU);
+
+public:
+  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM);
+};
+
+} // end namespace llvm
+
+#endif // LLVM_TRANSFORMS_SCALAR_ISOLATEPATH_H
diff --git a/llvm/lib/Passes/PassBuilder.cpp b/llvm/lib/Passes/PassBuilder.cpp
index 0697a0a6b4c74..0addad282b89b 100644
--- a/llvm/lib/Passes/PassBuilder.cpp
+++ b/llvm/lib/Passes/PassBuilder.cpp
@@ -277,6 +277,7 @@
 #include "llvm/Transforms/Scalar/InferAddressSpaces.h"
 #include "llvm/Transforms/Scalar/InferAlignment.h"
 #include "llvm/Transforms/Scalar/InstSimplifyPass.h"
+#include "llvm/Transforms/Scalar/IsolatePath.h"
 #include "llvm/Transforms/Scalar/JumpTableToSwitch.h"
 #include "llvm/Transforms/Scalar/JumpThreading.h"
 #include "llvm/Transforms/Scalar/LICM.h"
diff --git a/llvm/lib/Passes/PassBuilderPipelines.cpp b/llvm/lib/Passes/PassBuilderPipelines.cpp
index c83d2dc1f1514..ef72e0dc1f182 100644
--- a/llvm/lib/Passes/PassBuilderPipelines.cpp
+++ b/llvm/lib/Passes/PassBuilderPipelines.cpp
@@ -98,6 +98,7 @@
 #include "llvm/Transforms/Scalar/IndVarSimplify.h"
 #include "llvm/Transforms/Scalar/InferAlignment.h"
 #include "llvm/Transforms/Scalar/InstSimplifyPass.h"
+#include "llvm/Transforms/Scalar/IsolatePath.h"
 #include "llvm/Transforms/Scalar/JumpTableToSwitch.h"
 #include "llvm/Transforms/Scalar/JumpThreading.h"
 #include "llvm/Transforms/Scalar/LICM.h"
@@ -598,6 +599,10 @@ PassBuilder::buildFunctionSimplificationPipeline(OptimizationLevel Level,
         SimplifyCFGPass(SimplifyCFGOptions().convertSwitchRangeToICmp(true)));
   }
 
+  // Isolate paths with undefined behavior. At this point, all inlinable
+  // functions should be inlined and constants propagated.
+  FPM.addPass(IsolatePathPass());
+
   // Speculative execution if the target has divergent branches; otherwise nop.
   FPM.addPass(SpeculativeExecutionPass(/* OnlyIfDivergentTarget =*/true));
 
diff --git a/llvm/lib/Passes/PassRegistry.def b/llvm/lib/Passes/PassRegistry.def
index dd3dab3425975..ed473ceff9192 100644
--- a/llvm/lib/Passes/PassRegistry.def
+++ b/llvm/lib/Passes/PassRegistry.def
@@ -448,6 +448,7 @@ FUNCTION_PASS("interleaved-access", InterleavedAccessPass(TM))
 FUNCTION_PASS("interleaved-load-combine", InterleavedLoadCombinePass(TM))
 FUNCTION_PASS("invalidate<all>", InvalidateAllAnalysesPass())
 FUNCTION_PASS("irce", IRCEPass())
+FUNCTION_PASS("isolate-path", IsolatePathPass());
 FUNCTION_PASS("jump-threading", JumpThreadingPass())
 FUNCTION_PASS("jump-table-to-switch", JumpTableToSwitchPass());
 FUNCTION_PASS("kcfi", KCFIPass())
diff --git a/llvm/lib/Transforms/Scalar/CMakeLists.txt b/llvm/lib/Transforms/Scalar/CMakeLists.txt
index 84a5b02043d01..a1738aa7485df 100644
--- a/llvm/lib/Transforms/Scalar/CMakeLists.txt
+++ b/llvm/lib/Transforms/Scalar/CMakeLists.txt
@@ -24,6 +24,7 @@ add_llvm_component_library(LLVMScalarOpts
   InferAddressSpaces.cpp
   InferAlignment.cpp
   InstSimplifyPass.cpp
+  IsolatePath.cpp
   JumpThreading.cpp
   JumpTableToSwitch.cpp
   LICM.cpp
diff --git a/llvm/lib/Transforms/Scalar/IsolatePath.cpp b/llvm/lib/Transforms/Scalar/IsolatePath.cpp
new file mode 100644
index 0000000000000..6f561563c767f
--- /dev/null
+++ b/llvm/lib/Transforms/Scalar/IsolatePath.cpp
@@ -0,0 +1,300 @@
+//===- IsolatePath.cpp - Code to isolate paths with UB --------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This pass isolates code paths with undefined behavior from paths without
+// undefined behavior, and then adds a trap instruction on that path. This
+// prevents code generation where, after the UB instruction is eliminated, the
+// code can wander off the end of a function.
+//
+// For example, a nullptr dereference:
+//
+//   foo:
+//     %phi.val = phi ptr [ %arrayidx.i, %pred1 ], [ null, %pred2 ]
+//     %load.val = load i32, ptr %phi.val, align 4
+//
+// is converted into:
+//
+//   foo:
+//     %load.val = load i32, ptr %phi.val, align 4
+//
+//   foo.ub.path:
+//     %load.val.ub = load volatile i32, ptr null, align 4
+//     tail call void @llvm.trap()
+//     unreachable
+//
+// Note: we allow the NULL dereference to actually occur so that code that
+// wishes to catch the signal can do so.
+//
+//===----------------------------------------------------------------------===//
+
+#include "llvm/Transforms/Scalar/IsolatePath.h"
+#include "llvm/ADT/Statistic.h"
+#include "llvm/Analysis/DomTreeUpdater.h"
+#include "llvm/Analysis/PostDominators.h"
+#include "llvm/IR/Constants.h"
+#include "llvm/IR/Dominators.h"
+#include "llvm/IR/IRBuilder.h"
+#include "llvm/IR/Instructions.h"
+#include "llvm/IR/IntrinsicInst.h"
+#include "llvm/Transforms/Utils/BasicBlockUtils.h"
+#include "llvm/Transforms/Utils/Cloning.h"
+
+using namespace llvm;
+
+#define DEBUG_TYPE "isolate-path"
+
+STATISTIC(NumIsolatedBlocks, "Number of isolated blocks");
+
+/// Look through GEPs to see if the nullptr is accessed.
+static bool HasUBAccess(BasicBlock *Parent, GetElementPtrInst *GEP) {
+  for (Value *V : GEP->materialized_users()) {
+    if (auto *G = dyn_cast<GetElementPtrInst>(V)) {
+      if (G->getParent() != Parent)
+        return false;
+      return HasUBAccess(Parent, G);
+    } else if (auto *LI = dyn_cast<LoadInst>(V)) {
+      if (LI->getParent() != Parent)
+        return false;
+      if (GEP == LI->getPointerOperand())
+        return true;
+    } else if (auto *SI = dyn_cast<StoreInst>(V)) {
+      if (SI->getParent() != Parent)
+        return false;
+      if (GEP == SI->getPointerOperand())
+        return true;
+    }
+  }
+
+  return false;
+}
+
+static std::pair<PHINode *, Instruction *> GetFirstUBInst(BasicBlock *BB) {
+  // Find PHIs that have 'nullptr' inputs.
+  SmallPtrSet<PHINode *, 4> NullptrPhis;
+  for (PHINode &PN : BB->phis()) {
+    if (!PN.getType()->isPointerTy())
+      continue;
+
+    for (Value *V : PN.incoming_values())
+      if (isa<ConstantPointerNull>(V)) {
+        NullptrPhis.insert(&PN);
+        break;
+      }
+  }
+  if (NullptrPhis.empty())
+    return {};
+
+  // Grab instructions that may be UB.
+  SmallDenseMap<PHINode *, SmallPtrSet<Instruction *, 4>> MaybeUBInsts;
+  for (PHINode *PN : NullptrPhis) {
+    for (Value *V : PN->materialized_users()) {
+      if (auto *LI = dyn_cast<LoadInst>(V)) {
+        if (LI->getParent() == BB && PN == LI->getPointerOperand())
+          MaybeUBInsts[PN].insert(LI);
+      } else if (auto *SI = dyn_cast<StoreInst>(V)) {
+        if (SI->getParent() == BB && PN == SI->getPointerOperand())
+          MaybeUBInsts[PN].insert(SI);
+      } else if (auto *GEP = dyn_cast<GetElementPtrInst>(V)) {
+        if (GEP->getParent() == BB && HasUBAccess(BB, GEP))
+          MaybeUBInsts[PN].insert(GEP);
+      }
+    }
+  }
+  if (MaybeUBInsts.empty())
+    return {};
+
+  // Get the first UB instruction.
+  PHINode *FirstUBPhiNode = nullptr;
+  Instruction *FirstUBInst = nullptr;
+  for (auto Element : MaybeUBInsts) {
+    PHINode *PN = Element.getFirst();
+    SmallPtrSetImpl<Instruction *> &Insts = Element.getSecond();
+
+    for (Instruction &I : *BB) {
+      if (&I == FirstUBInst)
+        break;
+
+      if (Insts.contains(&I)) {
+        FirstUBPhiNode = PN;
+        FirstUBInst = &I;
+        break;
+      }
+    }
+  }
+
+  return std::make_pair(FirstUBPhiNode, FirstUBInst);
+}
+
+/// Convert any accesses of a nullptr within the BB into a trap.
+bool IsolatePathPass::ProcessPointerUndefinedBehavior(BasicBlock *BB,
+                                                      DomTreeUpdater *DTU) {
+  if (!BB->canSplitPredecessors())
+    return false;
+
+  // Get the first UB instruction and associated PHI node.
+  auto [FirstUBPhiNode, FirstUBInst] = GetFirstUBInst(BB);
+  if (!FirstUBInst)
+    return false;
+
+  // Now that we have the first UB instruction and the PHI node associated with
+  // it, determine how to split the predecessors.
+  SmallPtrSet<BasicBlock *, 4> UBPhiPreds;
+  SmallPtrSet<BasicBlock *, 4> NonUBPhiPreds;
+  unsigned Index = 0;
+  for (Value *V : FirstUBPhiNode->incoming_values())
+    if (isa<ConstantPointerNull>(V))
+      UBPhiPreds.insert(FirstUBPhiNode->getIncomingBlock(Index++));
+    else
+      NonUBPhiPreds.insert(FirstUBPhiNode->getIncomingBlock(Index++));
+
+  SmallVector<DominatorTree::UpdateType, 8> Updates;
+  BasicBlock *UBBlock = nullptr;
+  if (NonUBPhiPreds.empty()) {
+    // All PHI node values cause UB in the block. Just add the 'trap'
+    // instruction without cloning.
+    UBBlock = BB;
+
+    // Remove the block from any successors.
+    for (BasicBlock *Succ : successors(BB)) {
+      Succ->removePredecessor(BB);
+      Updates.push_back({DominatorTree::Delete, BB, Succ});
+    }
+  } else {
+    // Clone the block, isolating the UB instructions on their own path.
+    ValueToValueMapTy VMap;
+    UBBlock = CloneBasicBlock(BB, VMap, ".ub.path", BB->getParent());
+    VMap[BB] = UBBlock;
+    ++NumIsolatedBlocks;
+
+    // Replace the UB predecessors' terminators' targets with the new block.
+    llvm::for_each(UBPhiPreds, [&](BasicBlock *Pred) {
+      Pred->getTerminator()->replaceSuccessorWith(BB, UBBlock);
+    });
+
+    // Remove predecessors of isolated paths from the original PHI nodes.
+    for (PHINode &PN : BB->phis())
+      PN.removeIncomingValueIf([&](unsigned I) {
+        return UBPhiPreds.contains(PN.getIncomingBlock(I));
+      });
+
+    // Remove predecessors of valid paths from the isolated path PHI nodes.
+    for (PHINode &PN : UBBlock->phis())
+      PN.removeIncomingValueIf([&](unsigned I) {
+        return NonUBPhiPreds.contains(PN.getIncomingBlock(I));
+      });
+
+    // Rewrite the instructions in the cloned block to refer to the cloned
+    // instructions rather than those in the original block.
+    for (auto &I : *UBBlock) {
+      RemapDbgRecordRange(BB->getModule(), I.getDbgRecordRange(), VMap,
+                          RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);
+      RemapInstruction(&I, VMap,
+                       RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);
+    }
+
+    // Update the dominator tree.
+    for (auto *Pred : UBPhiPreds) {
+      Updates.push_back({DominatorTree::Insert, Pred, UBBlock});
+      Updates.push_back({DominatorTree::Delete, Pred, BB});
+    }
+  }
+
+  // Get the index into the block of the first UB instruction.
+  unsigned UBIndex = 0;
+  for (auto Iter = BB->begin(); Iter != BB->end(); ++Iter, ++UBIndex)
+    if (&*Iter == FirstUBInst) {
+      if (isa<LoadInst>(FirstUBInst))
+        ++UBIndex;
+      break;
+    }
+
+  // Remove the instructions following the nullptr dereference.
+  for (unsigned Index = UBBlock->size(); Index > UBIndex; --Index)
+    UBBlock->rbegin()->eraseFromParent();
+
+  // Allow the NULL dereference to actually occur so that code that wishes to
+  // catch the signal can do so.
+  if (const auto *LI = dyn_cast<LoadInst>(&*UBBlock->rbegin()))
+    const_cast<LoadInst *>(LI)->setVolatile(true);
+
+  // Add a 'trap()' call followed by an 'unreachable' terminator.
+  IRBuilder<> Builder(UBBlock);
+  Function *TrapDecl =
+      Intrinsic::getOrInsertDeclaration(BB->getModule(), Intrinsic::trap);
+  Builder.CreateCall(TrapDecl);
+  Builder.CreateUnreachable();
+
+  if (!Updates.empty())
+    DTU->applyUpdates(Updates);
+
+  SplitUBBlocks.insert(UBBlock);
+  return true;
+}
+
+PreservedAnalyses IsolatePathPass::run(Function &F,
+                                       FunctionAnalysisManager &FAM) {
+  bool Changed = false;
+
+  auto &DT = FAM.getResult<DominatorTreeAnalysis>(F);
+  auto &PDT = FAM.getResult<PostDominatorTreeAnalysis>(F);
+  DomTreeUpdater DTU(&DT, &PDT, DomTreeUpdater::UpdateStrategy::Eager);
+
+  // Use a worklist of blocks because we'll be adding new blocks to the
+  // function and potentially processing the same block multiple times.
+  std::vector<BasicBlock *> Blocks;
+  Blocks.reserve(F.size());
+  llvm::transform(F, std::back_inserter(Blocks),
+                  [](BasicBlock &BB) { return &BB; });
+
+  while (!Blocks.empty()) {
+    BasicBlock *BB = Blocks.back();
+    Blocks.pop_back();
+    if (SplitUBBlocks.contains(BB))
+      continue;
+
+    // No PHI nodes.
+    if (BB->phis().empty())
+      continue;
+
+    // Ignore landing and EH pads for now.
+    // FIXME: Should we support them?
+    if (BB->isLandingPad() || BB->isEHPad())
+      continue;
+
+    // Support some of the more common predecessor terminators.
+    // FIXME: Add support for 'SwitchInst'.
+    if (llvm::any_of(predecessors(BB), [&](BasicBlock *Pred) {
+          Instruction *TI = Pred->getTerminator();
+          return !isa<BranchInst>(TI) && !isa<ReturnInst>(TI) &&
+                 !isa<SwitchInst>(TI);
+        }))
+      continue;
+
+    if (auto *BI = dyn_cast<BranchInst>(BB->getTerminator()))
+      // If a BB has an edge to itself, then duplication of BB could result in
+      // reallocation of the BB's PHI nodes.
+      if (llvm::any_of(BI->successors(),
+                       [&](BasicBlock *B) { return B == BB; }))
+        continue;
+
+    if (ProcessPointerUndefinedBehavior(BB, &DTU)) {
+      // Reprocess the block to handle further UB instructions.
+      Blocks.push_back(BB);
+      Changed = true;
+    }
+  }
+
+  if (!Changed)
+    return PreservedAnalyses::all();
+
+  // FIXME: Should we update LoopInfo and LCCSA like in SplitBlockPredecessors?
+  PreservedAnalyses PA;
+  PA.preserve<DominatorTreeAnalysis>();
+  PA.preserve<PostDominatorTreeAnalysis>();
+  return PA;
+}
diff --git a/llvm/test/Other/new-pm-defaults.ll b/llvm/test/Other/new-pm-defaults.ll
index c554fdbf4c799..491759b3d1e1a 100644
--- a/llvm/test/Other/new-pm-defaults.ll
+++ b/llvm/test/Other/new-pm-defaults.ll
@@ -156,6 +156,8 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
+; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
@@ -215,7 +217,6 @@
 ; CHECK-O23SZ-NEXT: Invalidating analysis: LazyValueAnalysis
 ; CHECK-O1-NEXT: Running pass: CoroElidePass
 ; CHECK-O-NEXT: Running pass: ADCEPass
-; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: MemCpyOptPass
 ; CHECK-O23SZ-NEXT: Running pass: DSEPass
 ; CHECK-O23SZ-NEXT: Running pass: MoveAutoInitPass on foo
diff --git a/llvm/test/Other/new-pm-thinlto-postlink-defaults.ll b/llvm/test/Other/new-pm-thinlto-postlink-defaults.ll
index 62bb02d9b3c40..6e038785eb763 100644
--- a/llvm/test/Other/new-pm-thinlto-postlink-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-postlink-defaults.ll
@@ -86,6 +86,8 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
+; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
@@ -140,7 +142,6 @@
 ; CHECK-O23SZ-NEXT: Invalidating analysis: LazyValueAnalysis
 ; CHECK-O1-NEXT: Running pass: CoroElidePass
 ; CHECK-O-NEXT: Running pass: ADCEPass
-; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: MemCpyOptPass
 ; CHECK-O23SZ-NEXT: Running pass: DSEPass
 ; CHECK-O23SZ-NEXT: Running pass: MoveAutoInitPass on foo
diff --git a/llvm/test/Other/new-pm-thinlto-postlink-pgo-defaults.ll b/llvm/test/Other/new-pm-thinlto-postlink-pgo-defaults.ll
index 0da7a9f73bdce..bec6d67302066 100644
--- a/llvm/test/Other/new-pm-thinlto-postlink-pgo-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-postlink-pgo-defaults.ll
@@ -74,6 +74,7 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
diff --git a/llvm/test/Other/new-pm-thinlto-postlink-samplepgo-defaults.ll b/llvm/test/Other/new-pm-thinlto-postlink-samplepgo-defaults.ll
index 38b7890682783..69ccba72c92e0 100644
--- a/llvm/test/Other/new-pm-thinlto-postlink-samplepgo-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-postlink-samplepgo-defaults.ll
@@ -83,6 +83,7 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
diff --git a/llvm/test/Other/new-pm-thinlto-prelink-defaults.ll b/llvm/test/Other/new-pm-thinlto-prelink-defaults.ll
index 5aacd26def2be..68f4d421bb6de 100644
--- a/llvm/test/Other/new-pm-thinlto-prelink-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-prelink-defaults.ll
@@ -118,6 +118,8 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
+; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
@@ -172,7 +174,6 @@
 ; CHECK-O23SZ-NEXT: Invalidating analysis: LazyValueAnalysis
 ; CHECK-O1-NEXT: Running pass: CoroElidePass
 ; CHECK-O-NEXT: Running pass: ADCEPass
-; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: MemCpyOptPass
 ; CHECK-O23SZ-NEXT: Running pass: DSEPass
 ; CHECK-O23SZ-NEXT: Running pass: MoveAutoInitPass
diff --git a/llvm/test/Other/new-pm-thinlto-prelink-pgo-defaults.ll b/llvm/test/Other/new-pm-thinlto-prelink-pgo-defaults.ll
index f6a9406596803..4842b7fb54bc4 100644
--- a/llvm/test/Other/new-pm-thinlto-prelink-pgo-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-prelink-pgo-defaults.ll
@@ -116,6 +116,8 @@
 ; CHECK-O-NEXT: Running analysis: ScopedNoAliasAA
 ; CHECK-O-NEXT: Running analysis: TypeBasedAA
 ; CHECK-O-NEXT: Running analysis: OuterAnalysisManagerProxy
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
+; CHECK-O23SZ-NEXT: Running analysis: PostDominatorTreeAnalysis
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
@@ -127,7 +129,7 @@
 ; CHECK-O-NEXT: Running analysis: BlockFrequencyAnalysis on foo
 ; CHECK-O-NEXT: Running analysis: BranchProbabilityAnalysis on foo
 ; CHECK-O-NEXT: Running analysis: LoopAnalysis on foo
-; CHECK-O-NEXT: Running analysis: PostDominatorTreeAnalysis on foo
+; CHECK-O1-NEXT: Running analysis: PostDominatorTreeAnalysis on foo
 ; CHECK-O23SZ-NEXT: Running pass: AggressiveInstCombinePass
 ; CHECK-O1-NEXT: Running pass: LibCallsShrinkWrapPass
 ; CHECK-O2-NEXT: Running pass: LibCallsShrinkWrapPass
diff --git a/llvm/test/Other/new-pm-thinlto-prelink-samplepgo-defaults.ll b/llvm/test/Other/new-pm-thinlto-prelink-samplepgo-defaults.ll
index 48a9433d24999..8bb9fff9e638f 100644
--- a/llvm/test/Other/new-pm-thinlto-prelink-samplepgo-defaults.ll
+++ b/llvm/test/Other/new-pm-thinlto-prelink-samplepgo-defaults.ll
@@ -88,6 +88,7 @@
 ; CHECK-O-NEXT: Running pass: EarlyCSEPass
 ; CHECK-O-NEXT: Running analysis: MemorySSAAnalysis
 ; CHECK-O-NEXT: Running analysis: AAManager
+; CHECK-O23SZ-NEXT: Running pass: IsolatePathPass
 ; CHECK-O23SZ-NEXT: Running pass: SpeculativeExecutionPass
 ; CHECK-O23SZ-NEXT: Running pass: JumpThreadingPass
 ; CHECK-O23SZ-NEXT: Running analysis: LazyValueAnalysis
diff --git a/llvm/test/Transforms/IsolatePath/ub-memory-accesses.ll b/llvm/test/Transforms/IsolatePath/ub-memory-accesses.ll
new file mode 100644
index 0000000000000..fa083bda048a3
--- /dev/null
+++ b/llvm/test/Transforms/IsolatePath/ub-memory-accesses.ll
@@ -0,0 +1,370 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
+; RUN: opt -passes=isolate-path -S < %s | FileCheck %s
+
+%struct.demangle_component = type { i32, i32 }
+
+define dso_local ptr @test1(ptr noundef readonly captures(none) %di) {
+; CHECK-LABEL: define dso_local ptr @test1(
+; CHECK-SAME: ptr noundef readonly captures(none) [[DI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP_NOT_I]], label %[[IF_END_I:.*]], label %[[EXIT_UB_PATH:.*]]
+; CHECK:       [[IF_END_I]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    br label %[[EXIT:.*]]
+; CHECK:       [[EXIT]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ [[ARRAYIDX_I]], %[[IF_END_I]] ]
+; CHECK-NEXT:    [[TMP3:%.*]] = load i32, ptr [[RETVAL_0_I]], align 4
+; CHECK-NEXT:    call void @foo(i32 noundef [[TMP3]])
+; CHECK-NEXT:    [[ZZZ:%.*]] = getelementptr inbounds nuw i8, ptr [[RETVAL_0_I]], i64 4
+; CHECK-NEXT:    [[TMP4:%.*]] = load i32, ptr [[ZZZ]], align 4
+; CHECK-NEXT:    call void @bar(i32 noundef [[TMP4]])
+; CHECK-NEXT:    ret ptr [[RETVAL_0_I]]
+; CHECK:       [[EXIT_UB_PATH]]:
+; CHECK-NEXT:    [[RETVAL_0_I_UB_PATH:%.*]] = phi ptr [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    [[TMP5:%.*]] = load volatile i32, ptr [[RETVAL_0_I_UB_PATH]], align 4
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  br i1 %cmp.not.i, label %if.end.i, label %exit
+
+if.end.i:
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  br label %exit
+
+exit:
+  %retval.0.i = phi ptr [ %arrayidx.i, %if.end.i ], [ null, %entry ]
+  %3 = load i32, ptr %retval.0.i, align 4
+  call void @foo(i32 noundef %3)
+  %zzz = getelementptr inbounds nuw i8, ptr %retval.0.i, i64 4
+  %4 = load i32, ptr %zzz, align 4
+  call void @bar(i32 noundef %4)
+  ret ptr %retval.0.i
+}
+
+define dso_local ptr @test2(ptr noundef readonly captures(none) %di) {
+; CHECK-LABEL: define dso_local ptr @test2(
+; CHECK-SAME: ptr noundef readonly captures(none) [[DI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP_NOT_I]], label %[[IF_END_I:.*]], label %[[EXIT:.*]]
+; CHECK:       [[IF_END_I]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    [[CMP:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP]], label %[[EXIT]], label %[[RETURN:.*]]
+; CHECK:       [[EXIT]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ null, %[[IF_END_I]] ], [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    [[TMP3:%.*]] = load volatile i32, ptr [[RETVAL_0_I]], align 4
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+; CHECK:       [[RETURN]]:
+; CHECK-NEXT:    ret ptr null
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  br i1 %cmp.not.i, label %if.end.i, label %exit
+
+if.end.i:
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  %cmp = icmp slt i32 %0, %1
+  br i1 %cmp, label %exit, label %return
+
+exit:
+  %retval.0.i = phi ptr [ null, %if.end.i ], [ null, %entry ]
+  %3 = load i32, ptr %retval.0.i, align 4
+  call void @foo(i32 noundef %3)
+  %zzz = getelementptr inbounds nuw i8, ptr %retval.0.i, i64 4
+  %4 = load i32, ptr %zzz, align 4
+  call void @bar(i32 noundef %4)
+  br label %return
+
+return:
+  ret ptr null
+}
+
+define dso_local ptr @test3(i32 %choice, ptr noundef %di, ptr noundef %si) {
+; CHECK-LABEL: define dso_local ptr @test3(
+; CHECK-SAME: i32 [[CHOICE:%.*]], ptr noundef [[DI:%.*]], ptr noundef [[SI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    switch i32 [[CHOICE]], label %[[SW_EPILOG_UB_PATH:.*]] [
+; CHECK-NEXT:      i32 42, label %[[SW_BB:.*]]
+; CHECK-NEXT:      i32 3, label %[[SW_BB1:.*]]
+; CHECK-NEXT:    ]
+; CHECK:       [[SW_BB]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    br label %[[SW_EPILOG:.*]]
+; CHECK:       [[SW_BB1]]:
+; CHECK-NEXT:    [[SI_VAL:%.*]] = load ptr, ptr [[SI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I_1:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I_1:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT]], ptr [[SI_VAL]], i64 [[IDXPROM_I_1]]
+; CHECK-NEXT:    br label %[[SW_EPILOG]]
+; CHECK:       [[SW_EPILOG]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ [[ARRAYIDX_I]], %[[SW_BB]] ], [ [[ARRAYIDX_I_1]], %[[SW_BB1]] ]
+; CHECK-NEXT:    [[TMP3:%.*]] = load i32, ptr [[RETVAL_0_I]], align 4
+; CHECK-NEXT:    call void @foo(i32 noundef [[TMP3]])
+; CHECK-NEXT:    [[ZZZ:%.*]] = getelementptr inbounds nuw i8, ptr [[RETVAL_0_I]], i64 4
+; CHECK-NEXT:    [[TMP4:%.*]] = load i32, ptr [[ZZZ]], align 4
+; CHECK-NEXT:    call void @bar(i32 noundef [[TMP4]])
+; CHECK-NEXT:    br label %[[EXIT:.*]]
+; CHECK:       [[EXIT]]:
+; CHECK-NEXT:    ret ptr [[RETVAL_0_I]]
+; CHECK:       [[SW_EPILOG_UB_PATH]]:
+; CHECK-NEXT:    [[RETVAL_0_I_UB_PATH:%.*]] = phi ptr [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    [[TMP5:%.*]] = load volatile i32, ptr [[RETVAL_0_I_UB_PATH]], align 4
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  switch i32 %choice, label %sw.epilog [
+  i32 42, label %sw.bb
+  i32 3, label %sw.bb1
+  ]
+
+sw.bb:
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  br label %sw.epilog
+
+sw.bb1:
+  %si.val = load ptr, ptr %si, align 8
+  %idxprom.i.1 = sext i32 %0 to i64
+  %arrayidx.i.1 = getelementptr inbounds %struct.demangle_component, ptr %si.val, i64 %idxprom.i.1
+  br label %sw.epilog
+
+sw.epilog:
+  %retval.0.i = phi ptr [ %arrayidx.i, %sw.bb ], [ %arrayidx.i.1, %sw.bb1], [ null, %entry ]
+  %3 = load i32, ptr %retval.0.i, align 4
+  call void @foo(i32 noundef %3)
+  %zzz = getelementptr inbounds nuw i8, ptr %retval.0.i, i64 4
+  %4 = load i32, ptr %zzz, align 4
+  call void @bar(i32 noundef %4)
+  br label %exit
+
+exit:
+  ret ptr %retval.0.i
+}
+
+define dso_local ptr @test4(ptr noundef readonly captures(none) %di) {
+; CHECK-LABEL: define dso_local ptr @test4(
+; CHECK-SAME: ptr noundef readonly captures(none) [[DI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP_NOT_I]], label %[[IF_END_I:.*]], label %[[EXIT:.*]]
+; CHECK:       [[IF_END_I]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    call void @bar(ptr noundef [[ARRAYIDX_I]])
+; CHECK-NEXT:    br label %[[EXIT]]
+; CHECK:       [[EXIT]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ null, %[[IF_END_I]] ], [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    [[TMP3:%.*]] = load volatile i32, ptr [[RETVAL_0_I]], align 4
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  br i1 %cmp.not.i, label %if.end.i, label %exit
+
+if.end.i:
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  call void @bar(ptr noundef %arrayidx.i)
+  br label %exit
+
+exit:
+  %retval.0.i = phi ptr [ null, %if.end.i ], [ null, %entry ]
+  %3 = load i32, ptr %retval.0.i, align 4
+  call void @foo(i32 noundef %3)
+  %zzz = getelementptr inbounds nuw i8, ptr %retval.0.i, i64 4
+  %4 = load i32, ptr %zzz, align 4
+  call void @bar(i32 noundef %4)
+  ret ptr %retval.0.i
+}
+
+define dso_local ptr @test5(ptr noundef readonly captures(none) %di) {
+; CHECK-LABEL: define dso_local ptr @test5(
+; CHECK-SAME: ptr noundef readonly captures(none) [[DI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP_NOT_I]], label %[[IF_END_I:.*]], label %[[D_MAKE_EMPTY_EXIT_UB_PATH:.*]]
+; CHECK:       [[IF_END_I]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    br label %[[D_MAKE_EMPTY_EXIT:.*]]
+; CHECK:       [[D_MAKE_EMPTY_EXIT]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ [[ARRAYIDX_I]], %[[IF_END_I]] ]
+; CHECK-NEXT:    store i32 42, ptr [[RETVAL_0_I]], align 4
+; CHECK-NEXT:    ret ptr [[RETVAL_0_I]]
+; CHECK:       [[D_MAKE_EMPTY_EXIT_UB_PATH]]:
+; CHECK-NEXT:    [[RETVAL_0_I_UB_PATH:%.*]] = phi ptr [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  br i1 %cmp.not.i, label %if.end.i, label %d_make_empty.exit
+
+if.end.i:                                         ; preds = %entry
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  br label %d_make_empty.exit
+
+d_make_empty.exit:                                ; preds = %entry, %if.end.i
+  %retval.0.i = phi ptr [ %arrayidx.i, %if.end.i ], [ null, %entry ]
+  store i32 42, ptr %retval.0.i, align 4
+  ret ptr %retval.0.i
+}
+
+define dso_local ptr @test6(ptr noundef readonly captures(none) %di) {
+; CHECK-LABEL: define dso_local ptr @test6(
+; CHECK-SAME: ptr noundef readonly captures(none) [[DI:%.*]]) {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[NEXT_COMP_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 8
+; CHECK-NEXT:    [[TMP0:%.*]] = load i32, ptr [[NEXT_COMP_I]], align 8
+; CHECK-NEXT:    [[NUM_COMPS_I:%.*]] = getelementptr inbounds nuw i8, ptr [[DI]], i64 12
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr [[NUM_COMPS_I]], align 4
+; CHECK-NEXT:    [[CMP_NOT_I:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP_NOT_I]], label %[[IF_END_I:.*]], label %[[D_MAKE_EMPTY_EXIT:.*]]
+; CHECK:       [[IF_END_I]]:
+; CHECK-NEXT:    [[TMP2:%.*]] = load ptr, ptr [[DI]], align 8
+; CHECK-NEXT:    [[IDXPROM_I:%.*]] = sext i32 [[TMP0]] to i64
+; CHECK-NEXT:    [[ARRAYIDX_I:%.*]] = getelementptr inbounds [[STRUCT_DEMANGLE_COMPONENT:%.*]], ptr [[TMP2]], i64 [[IDXPROM_I]]
+; CHECK-NEXT:    [[CMP:%.*]] = icmp slt i32 [[TMP0]], [[TMP1]]
+; CHECK-NEXT:    br i1 [[CMP]], label %[[D_MAKE_EMPTY_EXIT]], label %[[RETURN:.*]]
+; CHECK:       [[D_MAKE_EMPTY_EXIT]]:
+; CHECK-NEXT:    [[RETVAL_0_I:%.*]] = phi ptr [ null, %[[IF_END_I]] ], [ null, %[[ENTRY]] ]
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+; CHECK:       [[RETURN]]:
+; CHECK-NEXT:    ret ptr null
+;
+entry:
+  %next_comp.i = getelementptr inbounds nuw i8, ptr %di, i64 8
+  %0 = load i32, ptr %next_comp.i, align 8
+  %num_comps.i = getelementptr inbounds nuw i8, ptr %di, i64 12
+  %1 = load i32, ptr %num_comps.i, align 4
+  %cmp.not.i = icmp slt i32 %0, %1
+  br i1 %cmp.not.i, label %if.end.i, label %d_make_empty.exit
+
+if.end.i:                                         ; preds = %entry
+  %2 = load ptr, ptr %di, align 8
+  %idxprom.i = sext i32 %0 to i64
+  %arrayidx.i = getelementptr inbounds %struct.demangle_component, ptr %2, i64 %idxprom.i
+  %cmp = icmp slt i32 %0, %1
+  br i1 %cmp, label %d_make_empty.exit, label %return
+
+d_make_empty.exit:                                ; preds = %entry, %if.end.i
+  %retval.0.i = phi ptr [ null, %if.end.i ], [ null, %entry ]
+  %zzz = getelementptr inbounds nuw i8, ptr %retval.0.i, i64 4
+  store i32 -1, ptr %zzz, align 4
+  br label %return
+
+return:
+  ret ptr null
+}
+
+define i32 @test7() local_unnamed_addr {
+; CHECK-LABEL: define i32 @test7() local_unnamed_addr {
+; CHECK-NEXT:  [[ENTRY:.*]]:
+; CHECK-NEXT:    [[TMP0:%.*]] = load ptr, ptr null, align 8
+; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr inttoptr (i64 128 to ptr), align 8
+; CHECK-NEXT:    switch i32 [[TMP1]], label %[[SW_DEFAULT:.*]] [
+; CHECK-NEXT:      i32 1, label %[[SW_EXIT:.*]]
+; CHECK-NEXT:      i32 7, label %[[SW_EXIT]]
+; CHECK-NEXT:      i32 8, label %[[SW_EXIT]]
+; CHECK-NEXT:      i32 0, label %[[SW_EXIT]]
+; CHECK-NEXT:    ]
+; CHECK:       [[SW_DEFAULT]]:
+; CHECK-NEXT:    br label %[[SW_EXIT_UB_PATH:.*]]
+; CHECK:       [[SW_EXIT]]:
+; CHECK-NEXT:    [[COMMON_RET1_OP_I_I:%.*]] = phi ptr [ [[TMP0]], %[[ENTRY]] ], [ [[TMP0]], %[[ENTRY]] ], [ [[TMP0]], %[[ENTRY]] ], [ [[TMP0]], %[[ENTRY]] ]
+; CHECK-NEXT:    [[DP_I:%.*]] = getelementptr i8, ptr [[COMMON_RET1_OP_I_I]], i64 480
+; CHECK-NEXT:    store i64 0, ptr [[DP_I]], align 8
+; CHECK-NEXT:    ret i32 0
+; CHECK:       [[SW_EXIT_UB_PATH]]:
+; CHECK-NEXT:    [[COMMON_RET1_OP_I_I_UB_PATH:%.*]] = phi ptr [ null, %[[SW_DEFAULT]] ]
+; CHECK-NEXT:    call void @llvm.trap()
+; CHECK-NEXT:    unreachable
+;
+entry:
+  %0 = load ptr, ptr null, align 8
+  %1 = load i32, ptr inttoptr (i64 128 to ptr), align 8
+  switch i32 %1, label %sw.default [
+  i32 1, label %sw.exit
+  i32 7, label %sw.exit
+  i32 8, label %sw.exit
+  i32 0, label %sw.exit
+  ]
+
+sw.default:                                      ; preds = %entry
+  br label %sw.exit
+
+sw.exit:                             ; preds = %sw.default, %entry, %entry, %entry, %entry
+  %common.ret1.op.i.i = phi ptr [ null, %sw.default ], [ %0, %entry ], [ %0, %entry ], [ %0, %entry ], [ %0, %entry ]
+  %dp.i = getelementptr i8, ptr %common.ret1.op.i.i, i64 480
+  store i64 0, ptr %dp.i, align 8
+  ret i32 0
+}
+
+declare void @foo(i32 noundef)
+
+declare void @bar(i32 noundef)



More information about the llvm-commits mailing list