[llvm] [RISCV] Take SEW/LMUL into account for value tracking of vsetvli[max] (PR #82163)

via llvm-commits llvm-commits at lists.llvm.org
Sun Feb 18 04:52:42 PST 2024


llvmbot wrote:


@llvm/pr-subscribers-llvm-transforms

Author: Wang Pengcheng (wangpc-pp)

<details>
<summary>Changes</summary>

Take SEW and LMUL into account when computing the known bits of vsetvli/vsetvlimax results, so that we can benefit from some InstCombine optimizations.

This PR contains two commits: the first adds tests and the second implements the optimization.


---

Patch is 40.83 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/82163.diff


3 Files Affected:

- (modified) llvm/lib/Analysis/ValueTracking.cpp (+24-5) 
- (modified) llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll (+555-49) 
- (added) llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll (+627) 


``````````diff
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 1a076adb1bad0a..693ec80d831f0b 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -73,6 +73,7 @@
 #include "llvm/Support/ErrorHandling.h"
 #include "llvm/Support/KnownBits.h"
 #include "llvm/Support/MathExtras.h"
+#include "llvm/TargetParser/RISCVTargetParser.h"
 #include <algorithm>
 #include <cassert>
 #include <cstdint>
@@ -1576,12 +1577,30 @@ static void computeKnownBitsFromOperator(const Operator *I,
         Known.Zero.setBitsFrom(32);
         break;
       case Intrinsic::riscv_vsetvli:
-      case Intrinsic::riscv_vsetvlimax:
-        // Assume that VL output is <= 65536.
-        // TODO: Take SEW and LMUL into account.
-        if (BitWidth > 17)
-          Known.Zero.setBitsFrom(17);
+      case Intrinsic::riscv_vsetvlimax: {
+        bool HasAVL = II->getIntrinsicID() == Intrinsic::riscv_vsetvli;
+        const ConstantRange &Range =
+            getVScaleRange(II->getFunction(), BitWidth);
+        uint64_t SEW =
+            1 << (cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue() +
+                  3);
+        uint64_t LMUL =
+            cast<ConstantInt>(II->getArgOperand(1 + HasAVL))->getZExtValue();
+        bool Fractional = LMUL > 4;
+        uint64_t MaxVL =
+            Range.getLower().getZExtValue() * RISCV::RVVBitsPerBlock / SEW;
+        MaxVL = Fractional ? MaxVL / (1 << (8 - LMUL)) : MaxVL * (1 << LMUL);
+
+        // The result of vsetvli must not be larger than AVL.
+        if (HasAVL)
+          if (auto *CI = dyn_cast<ConstantInt>(II->getArgOperand(0)))
+            MaxVL = std::min(MaxVL, CI->getZExtValue());
+
+        unsigned KnownZeroFirstBit = Log2_32(MaxVL) + 1;
+        if (BitWidth > KnownZeroFirstBit)
+          Known.Zero.setBitsFrom(KnownZeroFirstBit);
         break;
+      }
       case Intrinsic::vscale: {
         if (!II->getParent() || !II->getFunction())
           break;
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
index 51f78688b13edb..d88db2832360d8 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
@@ -3,8 +3,6 @@
 
 declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
 declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
-declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
-declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
 
 define i32 @vsetvli_i32() nounwind {
 ; CHECK-LABEL: @vsetvli_i32(
@@ -68,64 +66,572 @@ entry:
   ret i64 %1
 }
 
-define i32 @vsetvlimax_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_i32(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT:    ret i32 [[TMP0]]
+define i64 @vsetvl_e8m1_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m1_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
 ;
-entry:
-  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-  %1 = and i32 %0, 2147483647
-  ret i32 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+  %b = and i64 %a, 16383
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_sext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_sext_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = trunc i64 %0 to i32
-  %2 = sext i32 %1 to i64
-  ret i64 %2
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_zext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_zext_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_constant_avl() {
+; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = trunc i64 %0 to i32
-  %2 = zext i32 %1 to i64
-  ret i64 %2
+  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
+  %b = and i64 %a, 1
+  ret i64 %b
 }
 
-define i32 @vsetvlimax_and17_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i32(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT:    ret i32 [[TMP0]]
+define i64 @vsetvl_e8m2_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m2_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
 ;
-entry:
-  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-  %1 = and i32 %0, 131071
-  ret i32 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+  %b = and i64 %a, 32767
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_and17_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m2_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = and i64 %0, 131071
-  ret i64 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m4_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and17bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m8_and17bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+  %b = and i64 %a, 131071
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf8_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf8_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf8_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf8_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf8_and8bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf8_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT:    ret i64 0
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf2_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m1_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m8_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
+  %b = and i64 ...
[truncated]

``````````

</details>


https://github.com/llvm/llvm-project/pull/82163

