[llvm] [SCEV] Require that addrec operands dominate the loop (PR #67030)

Nikita Popov via llvm-commits llvm-commits at lists.llvm.org
Thu Sep 21 08:24:18 PDT 2023


https://github.com/nikic created https://github.com/llvm/llvm-project/pull/67030

SCEVExpander currently has special handling for the case where the start or the step of an addrec does not dominate the loop header; this handling is not exercised by any lit test.

Initially I thought this was entirely dead code, because addrec operands are required to be loop-invariant. However, SCEV currently allows creating an addrec with operands that are loop-invariant but defined *after* the loop.

This doesn't seem like a useful case to allow, and I'm almost certain it would be handled incorrectly in many places. The only place it currently shows up is a single unit test, and I don't think that one is intentional either.
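
For concreteness, here's a minimal standalone sketch of the situation the new assertion rejects. This is not part of the patch: the IR, the @f/%postval names, and the main() harness are invented for illustration, though the SCEV API calls are the real ones. %postval never varies inside %L, so isLoopInvariant() is happy with it as an addrec operand, yet it is defined after the loop and therefore does not dominate the header:

  #include "llvm/Analysis/AssumptionCache.h"
  #include "llvm/Analysis/LoopInfo.h"
  #include "llvm/Analysis/ScalarEvolution.h"
  #include "llvm/Analysis/TargetLibraryInfo.h"
  #include "llvm/AsmParser/Parser.h"
  #include "llvm/IR/Dominators.h"
  #include "llvm/IR/LLVMContext.h"
  #include "llvm/IR/Module.h"
  #include "llvm/Support/SourceMgr.h"
  #include "llvm/Support/raw_ostream.h"
  using namespace llvm;

  int main() {
    LLVMContext Ctx;
    SMDiagnostic Err;
    std::unique_ptr<Module> M = parseAssemblyString(R"IR(
      define void @f(i64 %n, i1 %c) {
      top:
        br label %L
      L:
        %phi = phi i64 [ 0, %top ], [ %add, %L ]
        %add = add i64 %phi, 1
        br i1 %c, label %L, label %post
      post:
        %postval = mul i64 %n, 3   ; loop-invariant, but defined after %L
        ret void
      }
    )IR", Err, Ctx);

    Function &F = *M->getFunction("f");
    DominatorTree DT(F);
    LoopInfo LI(DT);
    AssumptionCache AC(F);
    TargetLibraryInfoImpl TLII;
    TargetLibraryInfo TLI(TLII);
    ScalarEvolution SE(F, TLI, AC, DT, LI);

    Loop *L = *LI.begin();
    Value *PostVal = F.getValueSymbolTable()->lookup("postval");

    // %postval is "loop-invariant" for %L (it is not defined inside the
    // loop), but its defining block does not dominate the loop header.
    const SCEV *Start = SE.getSCEV(PostVal);
    const SCEV *Step = SE.getConstant(PostVal->getType(), 1);

    // Previously this returned {%postval,+,1}<%L>; with this patch the
    // properlyDominates() assertion in getAddRecExpr() fires instead.
    const SCEV *AR = SE.getAddRecExpr(Start, Step, L, SCEV::FlagAnyWrap);
    AR->print(errs());
  }

Before this change, getAddRecExpr() would return {%postval,+,1}<%L> here, and SCEVExpander then had to compensate with the PostLoopOffset/PostLoopScale machinery deleted below; with the new assertion, the construction is rejected up front.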

Am I missing something here?

From baf59ef734d40fc102b98f857a720c081876dc6e Mon Sep 17 00:00:00 2001
From: Nikita Popov <npopov at redhat.com>
Date: Thu, 21 Sep 2023 16:58:05 +0200
Subject: [PATCH] [SCEV] Require that addrec operands dominate the loop

SCEVExpander currently has special handling for the case where the
start or the step of an addrec does not dominate the loop header;
this handling is not exercised by any lit test.

Initially I thought this was entirely dead code, because addrec
operands are required to be loop-invariant. However, SCEV currently
allows creating an addrec with operands that are loop-invariant but
defined *after* the loop.

This doesn't seem like a useful case to allow, and we don't
appear to be using it outside a single, easy-to-adjust unit test.

Am I missing something here?
---
 llvm/lib/Analysis/ScalarEvolution.cpp         |  5 +-
 .../Utils/ScalarEvolutionExpander.cpp         | 64 ++-----------------
 .../Utils/ScalarEvolutionExpanderTest.cpp     |  6 +-
 3 files changed, 12 insertions(+), 63 deletions(-)

diff --git a/llvm/lib/Analysis/ScalarEvolution.cpp b/llvm/lib/Analysis/ScalarEvolution.cpp
index 00b1af73671c041..fdd64e3abdb32ab 100644
--- a/llvm/lib/Analysis/ScalarEvolution.cpp
+++ b/llvm/lib/Analysis/ScalarEvolution.cpp
@@ -3670,9 +3670,12 @@ ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
            "SCEVAddRecExpr operand types don't match!");
     assert(!Operands[i]->getType()->isPointerTy() && "Step must be integer");
   }
-  for (unsigned i = 0, e = Operands.size(); i != e; ++i)
+  for (unsigned i = 0, e = Operands.size(); i != e; ++i) {
     assert(isLoopInvariant(Operands[i], L) &&
            "SCEVAddRecExpr operand is not loop-invariant!");
+    assert(properlyDominates(Operands[i], L->getHeader()) &&
+           "SCEVAddRecExpr operand does not dominate loop!");
+  }
 #endif
 
   if (Operands.back()->isZero()) {
diff --git a/llvm/lib/Transforms/Utils/ScalarEvolutionExpander.cpp b/llvm/lib/Transforms/Utils/ScalarEvolutionExpander.cpp
index f939160df7a86c7..5ccfaa71495979a 100644
--- a/llvm/lib/Transforms/Utils/ScalarEvolutionExpander.cpp
+++ b/llvm/lib/Transforms/Utils/ScalarEvolutionExpander.cpp
@@ -1074,40 +1074,14 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
         normalizeForPostIncUse(S, Loops, SE, /*CheckInvertible=*/false));
   }
 
-  // Strip off any non-loop-dominating component from the addrec start.
   const SCEV *Start = Normalized->getStart();
-  const SCEV *PostLoopOffset = nullptr;
-  if (!SE.properlyDominates(Start, L->getHeader())) {
-    PostLoopOffset = Start;
-    Start = SE.getConstant(Normalized->getType(), 0);
-    Normalized = cast<SCEVAddRecExpr>(
-      SE.getAddRecExpr(Start, Normalized->getStepRecurrence(SE),
-                       Normalized->getLoop(),
-                       Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
-
-  // Strip off any non-loop-dominating component from the addrec step.
   const SCEV *Step = Normalized->getStepRecurrence(SE);
-  const SCEV *PostLoopScale = nullptr;
-  if (!SE.dominates(Step, L->getHeader())) {
-    PostLoopScale = Step;
-    Step = SE.getConstant(Normalized->getType(), 1);
-    if (!Start->isZero()) {
-        // The normalization below assumes that Start is constant zero, so if
-        // it isn't re-associate Start to PostLoopOffset.
-        assert(!PostLoopOffset && "Start not-null but PostLoopOffset set?");
-        PostLoopOffset = Start;
-        Start = SE.getConstant(Normalized->getType(), 0);
-    }
-    Normalized =
-      cast<SCEVAddRecExpr>(SE.getAddRecExpr(
-                             Start, Step, Normalized->getLoop(),
-                             Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
+  assert(SE.properlyDominates(Start, L->getHeader()) &&
+         "Start does not properly dominate loop header");
+  assert(SE.dominates(Step, L->getHeader()) && "Step does not dominate loop header");
 
-  // Expand the core addrec. If we need post-loop scaling, force it to
-  // expand to an integer type to avoid the need for additional casting.
-  Type *ExpandTy = PostLoopScale ? IntTy : STy;
+  // Expand the core addrec.
+  Type *ExpandTy = STy;
   // We can't use a pointer type for the addrec if the pointer type is
   // non-integral.
   Type *AddRecPHIExpandTy =
@@ -1188,28 +1162,6 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
                                  Result);
   }
 
-  // Re-apply any non-loop-dominating scale.
-  if (PostLoopScale) {
-    assert(S->isAffine() && "Can't linearly scale non-affine recurrences.");
-    Result = InsertNoopCastOfTo(Result, IntTy);
-    Result = Builder.CreateMul(Result, expandCodeFor(PostLoopScale, IntTy));
-  }
-
-  // Re-apply any non-loop-dominating offset.
-  if (PostLoopOffset) {
-    if (isa<PointerType>(ExpandTy)) {
-      if (Result->getType()->isIntegerTy()) {
-        Value *Base = expandCodeFor(PostLoopOffset, ExpandTy);
-        Result = expandAddToGEP(SE.getUnknown(Result), Base);
-      } else {
-        Result = expandAddToGEP(PostLoopOffset, Result);
-      }
-    } else {
-      Result = InsertNoopCastOfTo(Result, IntTy);
-      Result = Builder.CreateAdd(Result, expandCodeFor(PostLoopOffset, IntTy));
-    }
-  }
-
   return Result;
 }
 
@@ -2339,12 +2291,6 @@ struct SCEVFindUnsafe {
       }
     }
     if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) {
-      const SCEV *Step = AR->getStepRecurrence(SE);
-      if (!AR->isAffine() && !SE.dominates(Step, AR->getLoop()->getHeader())) {
-        IsUnsafe = true;
-        return false;
-      }
-
       // For non-affine addrecs or in non-canonical mode we need a preheader
       // to insert into.
       if (!AR->getLoop()->getLoopPreheader() &&
diff --git a/llvm/unittests/Transforms/Utils/ScalarEvolutionExpanderTest.cpp b/llvm/unittests/Transforms/Utils/ScalarEvolutionExpanderTest.cpp
index ca2fc71c4d38012..eb27e69da47faee 100644
--- a/llvm/unittests/Transforms/Utils/ScalarEvolutionExpanderTest.cpp
+++ b/llvm/unittests/Transforms/Utils/ScalarEvolutionExpanderTest.cpp
@@ -129,13 +129,13 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
    * top:
    *  br label %L.ph
    * L.ph:
+   *  %gepbase = getelementptr i64 addrspace(10)* %arg, i64 1
    *  br label %L
    * L:
    *  %phi = phi i64 [i64 0, %L.ph], [ %add, %L2 ]
    *  %add = add i64 %phi2, 1
    *  br i1 undef, label %post, label %L2
    * post:
-   *  %gepbase = getelementptr i64 addrspace(10)* %arg, i64 1
    *  #= %gep = getelementptr i64 addrspace(10)* %gepbase, i64 %add =#
    *  ret void
    *
@@ -170,6 +170,8 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
   Builder.CreateBr(LPh);
 
   Builder.SetInsertPoint(LPh);
+  Value *GepBase =
+      Builder.CreateGEP(T_int64, Arg, ConstantInt::get(T_int64, 1));
   Builder.CreateBr(L);
 
   Builder.SetInsertPoint(L);
@@ -180,8 +182,6 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
   Phi->addIncoming(Add, L);
 
   Builder.SetInsertPoint(Post);
-  Value *GepBase =
-      Builder.CreateGEP(T_int64, Arg, ConstantInt::get(T_int64, 1));
   Instruction *Ret = Builder.CreateRetVoid();
 
   ScalarEvolution SE = buildSE(*F);


