[llvm] [RISCV] Take SEW/LMUL into account for value tracking of vsetvli[max] (PR #82163)

Wang Pengcheng via llvm-commits llvm-commits at lists.llvm.org
Tue Mar 5 21:30:20 PST 2024


https://github.com/wangpc-pp updated https://github.com/llvm/llvm-project/pull/82163

>From f7be89bf590657f45c857c6e4840abbb0eb90686 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Sun, 18 Feb 2024 20:39:05 +0800
Subject: [PATCH 1/4] [RISCV] Add tests for vsetvli/vsetvlimax with different
 SEW/LMUL

---
 .../RISCV/riscv-vsetvli-knownbits.ll          | 694 +++++++++++++++--
 .../RISCV/riscv-vsetvlimax-knownbits.ll       | 706 ++++++++++++++++++
 2 files changed, 1346 insertions(+), 54 deletions(-)
 create mode 100644 llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll

diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
index 51f78688b13edb..e254d2a71b7f53 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
@@ -3,10 +3,8 @@
 
 declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
 declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
-declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
-declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
 
-define i32 @vsetvli_i32() nounwind {
+define i32 @vsetvli_i32() nounwind #0 {
 ; CHECK-LABEL: @vsetvli_i32(
 ; CHECK-NEXT:  entry:
 ; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
@@ -18,7 +16,7 @@ entry:
   ret i32 %1
 }
 
-define i64 @vsetvli_sext_i64() nounwind {
+define i64 @vsetvli_sext_i64() nounwind #0 {
 ; CHECK-LABEL: @vsetvli_sext_i64(
 ; CHECK-NEXT:  entry:
 ; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
@@ -31,7 +29,7 @@ entry:
   ret i64 %2
 }
 
-define i64 @vsetvli_zext_i64() nounwind {
+define i64 @vsetvli_zext_i64() nounwind #0 {
 ; CHECK-LABEL: @vsetvli_zext_i64(
 ; CHECK-NEXT:  entry:
 ; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
@@ -44,7 +42,29 @@ entry:
   ret i64 %2
 }
 
-define i32 @vsetvli_and17_i32() nounwind {
+define signext i32 @vsetvl_sext() nounwind #0 {
+; CHECK-LABEL: @vsetvl_sext(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = trunc i64 [[A]] to i32
+; CHECK-NEXT:    ret i32 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
+  %b = trunc i64 %a to i32
+  ret i32 %b
+}
+
+define zeroext i32 @vsetvl_zext() nounwind #0 {
+; CHECK-LABEL: @vsetvl_zext(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = trunc i64 [[A]] to i32
+; CHECK-NEXT:    ret i32 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
+  %b = trunc i64 %a to i32
+  ret i32 %b
+}
+
+define i32 @vsetvli_and17_i32() nounwind #0 {
 ; CHECK-LABEL: @vsetvli_and17_i32(
 ; CHECK-NEXT:  entry:
 ; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
@@ -56,7 +76,7 @@ entry:
   ret i32 %1
 }
 
-define i64 @vsetvli_and17_i64() nounwind {
+define i64 @vsetvli_and17_i64() nounwind #0 {
 ; CHECK-LABEL: @vsetvli_and17_i64(
 ; CHECK-NEXT:  entry:
 ; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
@@ -68,64 +88,630 @@ entry:
   ret i64 %1
 }
 
-define i32 @vsetvlimax_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_i32(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT:    ret i32 [[TMP0]]
+define i64 @vsetvl_e8m1_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m1_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
 ;
-entry:
-  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-  %1 = and i32 %0, 2147483647
-  ret i32 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+  %b = and i64 %a, 16383
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_sext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_sext_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = trunc i64 %0 to i32
-  %2 = sext i32 %1 to i64
-  ret i64 %2
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_zext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_zext_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_constant_avl() nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1
+; CHECK-NEXT:    ret i64 [[B]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = trunc i64 %0 to i32
-  %2 = zext i32 %1 to i64
-  ret i64 %2
+  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
+  %b = and i64 %a, 1
+  ret i64 %b
 }
 
-define i32 @vsetvlimax_and17_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i32(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT:    ret i32 [[TMP0]]
+define i64 @vsetvl_e8m2_and15bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m2_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
 ;
-entry:
-  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-  %1 = and i32 %0, 131071
-  ret i32 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+  %b = and i64 %a, 32767
+  ret i64 %b
 }
 
-define i64 @vsetvlimax_and17_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i64(
-; CHECK-NEXT:  entry:
-; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    ret i64 [[TMP0]]
+define i64 @vsetvl_e8m2_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
 ;
-entry:
-  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-  %1 = and i64 %0, 131071
-  ret i64 %1
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and16bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m4_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and15bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and17bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m8_and17bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+  %b = and i64 %a, 131071
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and16bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf2_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e8mf8_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and15bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and16bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and15bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and9bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf2_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e16mf8_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and15bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and9bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and8bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf2_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and9bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+  %b = and i64 %a, 511
+  ret i64 %b
 }
+
+define i64 @vsetvl_e32mf8_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e32mf8_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e32mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m1_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and11bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and12bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m8_and14bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64m8_and13bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64m8_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf2_and8bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf2_and7bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf2_and7bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 127
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
+  %b = and i64 %a, 127
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf4_and9bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf4_and8bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf4_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf8_and10bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvl_e64mf8_and9bits(i64 %avl) nounwind #0 {
+; CHECK-LABEL: @vsetvl_e64mf8_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+attributes #0 = { vscale_range(2,1024) }
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
new file mode 100644
index 00000000000000..72033b3af09f33
--- /dev/null
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
@@ -0,0 +1,706 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
+; RUN: opt < %s -passes=instcombine -S | FileCheck %s
+
+declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
+declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
+
+define i32 @vsetvlimax_i32() nounwind #0 {
+; CHECK-LABEL: @vsetvlimax_i32(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+; CHECK-NEXT:    ret i32 [[TMP0]]
+;
+entry:
+  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+  %1 = and i32 %0, 2147483647
+  ret i32 %1
+}
+
+define i64 @vsetvlimax_sext_i64() nounwind #0 {
+; CHECK-LABEL: @vsetvlimax_sext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = sext i32 %1 to i64
+  ret i64 %2
+}
+
+define i64 @vsetvlimax_zext_i64() nounwind #0 {
+; CHECK-LABEL: @vsetvlimax_zext_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+  %1 = trunc i64 %0 to i32
+  %2 = zext i32 %1 to i64
+  ret i64 %2
+}
+
+define signext i32 @vsetvlmax_sext() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_sext(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = trunc i64 [[A]] to i32
+; CHECK-NEXT:    ret i32 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+  %b = trunc i64 %a to i32
+  ret i32 %b
+}
+
+define zeroext i32 @vsetvlmax_zext() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_zext(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = trunc i64 [[A]] to i32
+; CHECK-NEXT:    ret i32 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+  %b = trunc i64 %a to i32
+  ret i32 %b
+}
+
+define i32 @vsetvlimax_and17_i32() nounwind #0 {
+; CHECK-LABEL: @vsetvlimax_and17_i32(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+; CHECK-NEXT:    ret i32 [[TMP0]]
+;
+entry:
+  %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+  %1 = and i32 %0, 131071
+  ret i32 %1
+}
+
+define i64 @vsetvlimax_and17_i64() nounwind #0 {
+; CHECK-LABEL: @vsetvlimax_and17_i64(
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    ret i64 [[TMP0]]
+;
+entry:
+  %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+  %1 = and i64 %0, 131071
+  ret i64 %1
+}
+
+define i64 @vsetvlmax_e8m1_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m1_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m1_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m2_and15bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m2_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m2_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m4_and16bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m4_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m4_and15bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m8_and17bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m8_and17bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 3)
+; CHECK-NEXT:    ret i64 [[A]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 3)
+  %b = and i64 %a, 131071
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m8_and16bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf2_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf2_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf2_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf4_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf4_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf8_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf8_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf8_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e8mf8_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m1_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m1_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m1_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m2_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m2_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m2_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m4_and15bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m4_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m4_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m8_and16bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m8_and16bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
+  %b = and i64 %a, 65535
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m8_and15bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf2_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf2_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf2_and9bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf2_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf4_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf4_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf4_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf8_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf8_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf8_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e16mf8_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m1_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m1_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m1_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m2_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m2_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m2_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m4_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m4_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m4_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m8_and15bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m8_and15bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
+  %b = and i64 %a, 32767
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m8_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf2_and9bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf2_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf2_and8bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf2_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf4_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf4_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf4_and9bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf4_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf8_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf8_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf8_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e32mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m1_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m1_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m1_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m1_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m2_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m2_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m2_and11bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m2_and11bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
+  %b = and i64 %a, 2047
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m4_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m4_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m4_and12bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m4_and12bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
+  %b = and i64 %a, 4095
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m8_and14bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m8_and14bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
+  %b = and i64 %a, 16383
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m8_and13bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64m8_and13bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
+  %b = and i64 %a, 8191
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf2_and8bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf2_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf2_and7bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf2_and7bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 127
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
+  %b = and i64 %a, 127
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf4_and9bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf4_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf4_and8bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf4_and8bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
+  %b = and i64 %a, 255
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf8_and10bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf8_and10bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
+  %b = and i64 %a, 1023
+  ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf8_and9bits() nounwind #0 {
+; CHECK-LABEL: @vsetvlmax_e64mf8_and9bits(
+; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
+; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT:    ret i64 [[B]]
+;
+  %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
+  %b = and i64 %a, 511
+  ret i64 %b
+}
+
+attributes #0 = { vscale_range(2,1024) }

>From 7d65da6e6fd84a3fbbbef352c6250ca135270805 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Sun, 18 Feb 2024 17:09:47 +0800
Subject: [PATCH 2/4] [RISCV] Take SEW/LMUL into account for value tracking of
 vsetvli[max]

So that we can benefit from some instcombine optimizations.
---
 llvm/lib/Analysis/ValueTracking.cpp           | 30 +++++--
 .../RISCV/riscv-vsetvli-knownbits.ll          | 84 +++++++------------
 .../RISCV/riscv-vsetvlimax-knownbits.ll       | 81 ++++++------------
 3 files changed, 80 insertions(+), 115 deletions(-)

diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 9d78c5d323cc6c..1e09caa66f5aa6 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -73,6 +73,7 @@
 #include "llvm/Support/ErrorHandling.h"
 #include "llvm/Support/KnownBits.h"
 #include "llvm/Support/MathExtras.h"
+#include "llvm/TargetParser/RISCVTargetParser.h"
 #include <algorithm>
 #include <cassert>
 #include <cstdint>
@@ -1586,12 +1587,31 @@ static void computeKnownBitsFromOperator(const Operator *I,
         Known.Zero.setBitsFrom(32);
         break;
       case Intrinsic::riscv_vsetvli:
-      case Intrinsic::riscv_vsetvlimax:
-        // Assume that VL output is <= 65536.
-        // TODO: Take SEW and LMUL into account.
-        if (BitWidth > 17)
-          Known.Zero.setBitsFrom(17);
+      case Intrinsic::riscv_vsetvlimax: {
+        bool HasAVL = II->getIntrinsicID() == Intrinsic::riscv_vsetvli;
+        const ConstantRange &Range =
+            getVScaleRange(II->getFunction(), BitWidth);
+        uint64_t VSEW =
+            cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue();
+        uint64_t SEW = 1 << (VSEW + 3);
+        uint64_t VLMUL =
+            cast<ConstantInt>(II->getArgOperand(1 + HasAVL))->getZExtValue();
+        bool Fractional = VLMUL > 4;
+        uint64_t LMUL = Fractional ? (1 << (8 - VLMUL)) : (1 << VLMUL);
+        uint64_t MaxVL =
+            Range.getUpper().getZExtValue() * RISCV::RVVBitsPerBlock / SEW;
+        MaxVL = Fractional ? MaxVL / LMUL : MaxVL * LMUL;
+
+        // Result of vsetvli must not be larger than AVL.
+        if (HasAVL)
+          if (auto *CI = dyn_cast<ConstantInt>(II->getArgOperand(0)))
+            MaxVL = std::min(MaxVL, CI->getZExtValue());
+
+        unsigned KnownZeroFirstBit = Log2_32(MaxVL) + 1;
+        if (BitWidth > KnownZeroFirstBit)
+          Known.Zero.setBitsFrom(KnownZeroFirstBit);
         break;
+      }
       case Intrinsic::vscale: {
         if (!II->getParent() || !II->getFunction())
           break;
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
index e254d2a71b7f53..1afae6565fe26b 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
@@ -91,8 +91,7 @@ entry:
 define i64 @vsetvl_e8m1_and14bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8m1_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
   %b = and i64 %a, 16383
@@ -113,8 +112,7 @@ define i64 @vsetvl_e8m1_and13bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e8m1_constant_avl() nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
   %b = and i64 %a, 1
@@ -124,8 +122,7 @@ define i64 @vsetvl_e8m1_constant_avl() nounwind #0 {
 define i64 @vsetvl_e8m2_and15bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8m2_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
   %b = and i64 %a, 32767
@@ -146,8 +143,7 @@ define i64 @vsetvl_e8m2_and14bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e8m4_and16bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8m4_and16bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
   %b = and i64 %a, 65535
@@ -189,8 +185,7 @@ define i64 @vsetvl_e8m8_and16bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e8mf2_and11bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
   %b = and i64 %a, 2047
@@ -211,8 +206,7 @@ define i64 @vsetvl_e8mf2_and10bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e8mf4_and12bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
   %b = and i64 %a, 4095
@@ -233,8 +227,7 @@ define i64 @vsetvl_e8mf4_and11bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e8mf8_and13bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
   %b = and i64 %a, 8191
@@ -255,8 +248,7 @@ define i64 @vsetvl_e8mf8_and12bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16m1_and13bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16m1_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
   %b = and i64 %a, 8191
@@ -277,8 +269,7 @@ define i64 @vsetvl_e16m1_and12bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16m2_and14bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16m2_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
   %b = and i64 %a, 16383
@@ -299,8 +290,7 @@ define i64 @vsetvl_e16m2_and13bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16m4_and15bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16m4_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
   %b = and i64 %a, 32767
@@ -321,8 +311,7 @@ define i64 @vsetvl_e16m4_and14bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16m8_and16bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16m8_and16bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
   %b = and i64 %a, 65535
@@ -343,8 +332,7 @@ define i64 @vsetvl_e16m8_and15bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16mf2_and10bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
   %b = and i64 %a, 1023
@@ -365,8 +353,7 @@ define i64 @vsetvl_e16mf2_and9bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16mf4_and11bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
   %b = and i64 %a, 2047
@@ -387,8 +374,7 @@ define i64 @vsetvl_e16mf4_and10bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e16mf8_and12bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
   %b = and i64 %a, 4095
@@ -409,8 +395,7 @@ define i64 @vsetvl_e16mf8_and11bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32m1_and12bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32m1_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
   %b = and i64 %a, 4095
@@ -431,8 +416,7 @@ define i64 @vsetvl_e32m1_and11bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32m2_and13bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32m2_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
   %b = and i64 %a, 8191
@@ -453,8 +437,7 @@ define i64 @vsetvl_e32m2_and12bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32m4_and14bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32m4_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
   %b = and i64 %a, 16383
@@ -475,8 +458,7 @@ define i64 @vsetvl_e32m4_and13bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32m8_and15bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32m8_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
   %b = and i64 %a, 32767
@@ -497,8 +479,7 @@ define i64 @vsetvl_e32m8_and14bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32mf2_and9bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
   %b = and i64 %a, 511
@@ -519,8 +500,7 @@ define i64 @vsetvl_e32mf2_and8bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32mf4_and10bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
   %b = and i64 %a, 1023
@@ -541,8 +521,7 @@ define i64 @vsetvl_e32mf4_and9bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e32mf8_and11bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
   %b = and i64 %a, 2047
@@ -563,8 +542,7 @@ define i64 @vsetvl_e32mf8_and10bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64m1_and11bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64m1_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
   %b = and i64 %a, 2047
@@ -585,8 +563,7 @@ define i64 @vsetvl_e64m1_and10bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64m2_and12bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64m2_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
   %b = and i64 %a, 4095
@@ -607,8 +584,7 @@ define i64 @vsetvl_e64m2_and11bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64m4_and13bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64m4_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
   %b = and i64 %a, 8191
@@ -629,8 +605,7 @@ define i64 @vsetvl_e64m4_and12bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64m8_and14bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64m8_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
   %b = and i64 %a, 16383
@@ -651,8 +626,7 @@ define i64 @vsetvl_e64m8_and13bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64mf2_and8bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
   %b = and i64 %a, 255
@@ -673,8 +647,7 @@ define i64 @vsetvl_e64mf2_and7bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64mf4_and9bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
   %b = and i64 %a, 511
@@ -695,8 +668,7 @@ define i64 @vsetvl_e64mf4_and8bits(i64 %avl) nounwind #0 {
 define i64 @vsetvl_e64mf8_and10bits(i64 %avl) nounwind #0 {
 ; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
   %b = and i64 %a, 1023
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
index 72033b3af09f33..093ba75e87b5a7 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
@@ -91,8 +91,7 @@ entry:
 define i64 @vsetvlmax_e8m1_and14bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8m1_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
   %b = and i64 %a, 16383
@@ -113,8 +112,7 @@ define i64 @vsetvlmax_e8m1_and13bits() nounwind #0 {
 define i64 @vsetvlmax_e8m2_and15bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8m2_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
   %b = and i64 %a, 32767
@@ -135,8 +133,7 @@ define i64 @vsetvlmax_e8m2_and14bits() nounwind #0 {
 define i64 @vsetvlmax_e8m4_and16bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8m4_and16bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
   %b = and i64 %a, 65535
@@ -178,8 +175,7 @@ define i64 @vsetvlmax_e8m8_and16bits() nounwind #0 {
 define i64 @vsetvlmax_e8mf2_and11bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8mf2_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
   %b = and i64 %a, 2047
@@ -200,8 +196,7 @@ define i64 @vsetvlmax_e8mf2_and10bits() nounwind #0 {
 define i64 @vsetvlmax_e8mf4_and12bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8mf4_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
   %b = and i64 %a, 4095
@@ -222,8 +217,7 @@ define i64 @vsetvlmax_e8mf4_and11bits() nounwind #0 {
 define i64 @vsetvlmax_e8mf8_and13bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e8mf8_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
   %b = and i64 %a, 8191
@@ -244,8 +238,7 @@ define i64 @vsetvlmax_e8mf8_and12bits() nounwind #0 {
 define i64 @vsetvlmax_e16m1_and13bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16m1_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
   %b = and i64 %a, 8191
@@ -266,8 +259,7 @@ define i64 @vsetvlmax_e16m1_and12bits() nounwind #0 {
 define i64 @vsetvlmax_e16m2_and14bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16m2_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
   %b = and i64 %a, 16383
@@ -288,8 +280,7 @@ define i64 @vsetvlmax_e16m2_and13bits() nounwind #0 {
 define i64 @vsetvlmax_e16m4_and15bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16m4_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
   %b = and i64 %a, 32767
@@ -310,8 +301,7 @@ define i64 @vsetvlmax_e16m4_and14bits() nounwind #0 {
 define i64 @vsetvlmax_e16m8_and16bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16m8_and16bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
   %b = and i64 %a, 65535
@@ -332,8 +322,7 @@ define i64 @vsetvlmax_e16m8_and15bits() nounwind #0 {
 define i64 @vsetvlmax_e16mf2_and10bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16mf2_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
   %b = and i64 %a, 1023
@@ -354,8 +343,7 @@ define i64 @vsetvlmax_e16mf2_and9bits() nounwind #0 {
 define i64 @vsetvlmax_e16mf4_and11bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16mf4_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
   %b = and i64 %a, 2047
@@ -376,8 +364,7 @@ define i64 @vsetvlmax_e16mf4_and10bits() nounwind #0 {
 define i64 @vsetvlmax_e16mf8_and12bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e16mf8_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
   %b = and i64 %a, 4095
@@ -398,8 +385,7 @@ define i64 @vsetvlmax_e16mf8_and11bits() nounwind #0 {
 define i64 @vsetvlmax_e32m1_and12bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32m1_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
   %b = and i64 %a, 4095
@@ -420,8 +406,7 @@ define i64 @vsetvlmax_e32m1_and11bits() nounwind #0 {
 define i64 @vsetvlmax_e32m2_and13bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32m2_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
   %b = and i64 %a, 8191
@@ -442,8 +427,7 @@ define i64 @vsetvlmax_e32m2_and12bits() nounwind #0 {
 define i64 @vsetvlmax_e32m4_and14bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32m4_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
   %b = and i64 %a, 16383
@@ -464,8 +448,7 @@ define i64 @vsetvlmax_e32m4_and13bits() nounwind #0 {
 define i64 @vsetvlmax_e32m8_and15bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32m8_and15bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
   %b = and i64 %a, 32767
@@ -486,8 +469,7 @@ define i64 @vsetvlmax_e32m8_and14bits() nounwind #0 {
 define i64 @vsetvlmax_e32mf2_and9bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32mf2_and9bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
   %b = and i64 %a, 511
@@ -508,8 +490,7 @@ define i64 @vsetvlmax_e32mf2_and8bits() nounwind #0 {
 define i64 @vsetvlmax_e32mf4_and10bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32mf4_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
   %b = and i64 %a, 1023
@@ -530,8 +511,7 @@ define i64 @vsetvlmax_e32mf4_and9bits() nounwind #0 {
 define i64 @vsetvlmax_e32mf8_and11bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e32mf8_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
   %b = and i64 %a, 2047
@@ -552,8 +532,7 @@ define i64 @vsetvlmax_e32mf8_and10bits() nounwind #0 {
 define i64 @vsetvlmax_e64m1_and11bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64m1_and11bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
   %b = and i64 %a, 2047
@@ -574,8 +553,7 @@ define i64 @vsetvlmax_e64m1_and10bits() nounwind #0 {
 define i64 @vsetvlmax_e64m2_and12bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64m2_and12bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
   %b = and i64 %a, 4095
@@ -596,8 +574,7 @@ define i64 @vsetvlmax_e64m2_and11bits() nounwind #0 {
 define i64 @vsetvlmax_e64m4_and13bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64m4_and13bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
   %b = and i64 %a, 8191
@@ -618,8 +595,7 @@ define i64 @vsetvlmax_e64m4_and12bits() nounwind #0 {
 define i64 @vsetvlmax_e64m8_and14bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64m8_and14bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
   %b = and i64 %a, 16383
@@ -640,8 +616,7 @@ define i64 @vsetvlmax_e64m8_and13bits() nounwind #0 {
 define i64 @vsetvlmax_e64mf2_and8bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64mf2_and8bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
   %b = and i64 %a, 255
@@ -662,8 +637,7 @@ define i64 @vsetvlmax_e64mf2_and7bits() nounwind #0 {
 define i64 @vsetvlmax_e64mf4_and9bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64mf4_and9bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
   %b = and i64 %a, 511
@@ -684,8 +658,7 @@ define i64 @vsetvlmax_e64mf4_and8bits() nounwind #0 {
 define i64 @vsetvlmax_e64mf8_and10bits() nounwind #0 {
 ; CHECK-LABEL: @vsetvlmax_e64mf8_and10bits(
 ; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
-; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT:    ret i64 [[B]]
+; CHECK-NEXT:    ret i64 [[A]]
 ;
   %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
   %b = and i64 %a, 1023

>From 5a669e1862a78a69afd2f9cb7971092550f9f84f Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 27 Feb 2024 14:37:06 +0800
Subject: [PATCH 3/4] Remove reference

---
 llvm/lib/Analysis/ValueTracking.cpp | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 1e09caa66f5aa6..99643a924252a4 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1589,8 +1589,7 @@ static void computeKnownBitsFromOperator(const Operator *I,
       case Intrinsic::riscv_vsetvli:
       case Intrinsic::riscv_vsetvlimax: {
         bool HasAVL = II->getIntrinsicID() == Intrinsic::riscv_vsetvli;
-        const ConstantRange &Range =
-            getVScaleRange(II->getFunction(), BitWidth);
+        const ConstantRange Range = getVScaleRange(II->getFunction(), BitWidth);
         uint64_t VSEW =
             cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue();
         uint64_t SEW = 1 << (VSEW + 3);

>From 5fc432992b03b1b61374df0abe2f22681c0dab0e Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 6 Mar 2024 13:30:02 +0800
Subject: [PATCH 4/4] Rebase and use helpers in RISCVVType

---
 llvm/lib/Analysis/ValueTracking.cpp | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 99643a924252a4..9d3c7899d4377a 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1590,16 +1590,13 @@ static void computeKnownBitsFromOperator(const Operator *I,
       case Intrinsic::riscv_vsetvlimax: {
         bool HasAVL = II->getIntrinsicID() == Intrinsic::riscv_vsetvli;
         const ConstantRange Range = getVScaleRange(II->getFunction(), BitWidth);
-        uint64_t VSEW =
-            cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue();
-        uint64_t SEW = 1 << (VSEW + 3);
-        uint64_t VLMUL =
-            cast<ConstantInt>(II->getArgOperand(1 + HasAVL))->getZExtValue();
-        bool Fractional = VLMUL > 4;
-        uint64_t LMUL = Fractional ? (1 << (8 - VLMUL)) : (1 << VLMUL);
-        uint64_t MaxVL =
-            Range.getUpper().getZExtValue() * RISCV::RVVBitsPerBlock / SEW;
-        MaxVL = Fractional ? MaxVL / LMUL : MaxVL * LMUL;
+        uint64_t SEW = RISCVVType::decodeVSEW(
+            cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue());
+        RISCVII::VLMUL VLMUL = static_cast<RISCVII::VLMUL>(
+            cast<ConstantInt>(II->getArgOperand(1 + HasAVL))->getZExtValue());
+        uint64_t MaxVLEN =
+            (Range.getUpper().getZExtValue() - 1) * RISCV::RVVBitsPerBlock;
+        uint64_t MaxVL = MaxVLEN / RISCVVType::getSEWLMULRatio(SEW, VLMUL);
 
         // Result of vsetvli must be not larger than AVL.
         if (HasAVL)



More information about the llvm-commits mailing list