[llvm] [RISCV] Take SEW/LMUL into account for value tracking of vsetvli[max] (PR #82163)
Wang Pengcheng via llvm-commits
llvm-commits at lists.llvm.org
Sun Feb 18 04:52:15 PST 2024
https://github.com/wangpc-pp created https://github.com/llvm/llvm-project/pull/82163
So that we can benefit from some instcombine optimizations.
This PR contains two commits: the first is for adding tests and the second is for the optimization.
From 49c8c42833fe2b43505eb27255115284a5e74657 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Sun, 18 Feb 2024 20:39:05 +0800
Subject: [PATCH 1/2] [RISCV] Add tests for vsetvli/vsetvlimax with different
SEW/LMUL
---
.../RISCV/riscv-vsetvli-knownbits.ll | 660 +++++++++++++++--
.../RISCV/riscv-vsetvlimax-knownbits.ll | 682 ++++++++++++++++++
2 files changed, 1293 insertions(+), 49 deletions(-)
create mode 100644 llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
index 51f78688b13edb..5e5fec9cab8913 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
@@ -3,8 +3,6 @@
declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
-declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
-declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
define i32 @vsetvli_i32() nounwind {
; CHECK-LABEL: @vsetvli_i32(
@@ -68,64 +66,628 @@ entry:
ret i64 %1
}
-define i32 @vsetvlimax_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_i32(
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT: ret i32 [[TMP0]]
+define i64 @vsetvl_e8m1_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m1_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
;
-entry:
- %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
- %1 = and i32 %0, 2147483647
- ret i32 %1
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+ %b = and i64 %a, 16383
+ ret i64 %b
}
-define i64 @vsetvlimax_sext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_sext_i64(
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT: ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m1_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
;
-entry:
- %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
- %1 = trunc i64 %0 to i32
- %2 = sext i32 %1 to i64
- ret i64 %2
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
+ %b = and i64 %a, 8191
+ ret i64 %b
}
-define i64 @vsetvlimax_zext_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_zext_i64(
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT: ret i64 [[TMP0]]
+define i64 @vsetvl_e8m1_constant_avl() {
+; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1
+; CHECK-NEXT: ret i64 [[B]]
;
-entry:
- %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
- %1 = trunc i64 %0 to i32
- %2 = zext i32 %1 to i64
- ret i64 %2
+ %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
+ %b = and i64 %a, 1
+ ret i64 %b
}
-define i32 @vsetvlimax_and17_i32() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i32(
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
-; CHECK-NEXT: ret i32 [[TMP0]]
+define i64 @vsetvl_e8m2_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m2_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
;
-entry:
- %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
- %1 = and i32 %0, 131071
- ret i32 %1
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+ %b = and i64 %a, 32767
+ ret i64 %b
}
-define i64 @vsetvlimax_and17_i64() nounwind {
-; CHECK-LABEL: @vsetvlimax_and17_i64(
-; CHECK-NEXT: entry:
-; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT: ret i64 [[TMP0]]
+define i64 @vsetvl_e8m2_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m2_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
;
-entry:
- %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
- %1 = and i64 %0, 131071
- ret i64 %1
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m4_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8m4_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m4_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and17bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m8_and17bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT: ret i64 [[A]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+ %b = and i64 %a, 131071
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8m8_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8m8_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf2_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf2_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf4_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e8mf8_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e8mf8_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m1_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m1_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m1_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m2_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m2_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m2_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m4_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m4_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m4_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and16bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m8_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16m8_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16m8_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf2_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf2_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf4_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e16mf8_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e16mf8_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m1_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m1_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m1_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m2_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m2_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m4_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m4_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m4_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and15bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m8_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32m8_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32m8_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf2_and8bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf2_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf4_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf8_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e32mf8_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e32mf8_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m1_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m1_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m1_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m2_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m2_and11bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m2_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m4_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m4_and12bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m4_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m8_and14bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m8_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64m8_and13bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64m8_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf2_and8bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf2_and7bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf2_and7bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 127
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
+ %b = and i64 %a, 127
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf4_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf4_and8bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf4_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf8_and10bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvl_e64mf8_and9bits(i64 %avl) {
+; CHECK-LABEL: @vsetvl_e64mf8_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
+ %b = and i64 %a, 511
+ ret i64 %b
}
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
new file mode 100644
index 00000000000000..11e8a9ec117fbe
--- /dev/null
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
@@ -0,0 +1,682 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
+; RUN: opt < %s -passes=instcombine -S | FileCheck %s
+
+declare i32 @llvm.riscv.vsetvlimax.i32(i32, i32)
+declare i64 @llvm.riscv.vsetvlimax.i64(i64, i64)
+
+define i32 @vsetvlimax_i32() nounwind {
+; CHECK-LABEL: @vsetvlimax_i32(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+; CHECK-NEXT: ret i32 [[TMP0]]
+;
+entry:
+ %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+ %1 = and i32 %0, 2147483647
+ ret i32 %1
+}
+
+define i64 @vsetvlimax_sext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_sext_i64(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT: ret i64 [[TMP0]]
+;
+entry:
+ %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+ %1 = trunc i64 %0 to i32
+ %2 = sext i32 %1 to i64
+ ret i64 %2
+}
+
+define i64 @vsetvlimax_zext_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_zext_i64(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT: ret i64 [[TMP0]]
+;
+entry:
+ %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+ %1 = trunc i64 %0 to i32
+ %2 = zext i32 %1 to i64
+ ret i64 %2
+}
+
+define i32 @vsetvlimax_and17_i32() nounwind {
+; CHECK-LABEL: @vsetvlimax_and17_i32(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+; CHECK-NEXT: ret i32 [[TMP0]]
+;
+entry:
+ %0 = call i32 @llvm.riscv.vsetvlimax.i32(i32 1, i32 1)
+ %1 = and i32 %0, 131071
+ ret i32 %1
+}
+
+define i64 @vsetvlimax_and17_i64() nounwind {
+; CHECK-LABEL: @vsetvlimax_and17_i64(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT: ret i64 [[TMP0]]
+;
+entry:
+ %0 = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+ %1 = and i64 %0, 131071
+ ret i64 %1
+}
+
+define i64 @vsetvlmax_e8m1_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e8m1_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m1_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e8m1_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m2_and15bits() {
+; CHECK-LABEL: @vsetvlmax_e8m2_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m2_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e8m2_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m4_and16bits() {
+; CHECK-LABEL: @vsetvlmax_e8m4_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m4_and15bits() {
+; CHECK-LABEL: @vsetvlmax_e8m4_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m8_and17bits() {
+; CHECK-LABEL: @vsetvlmax_e8m8_and17bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 3)
+; CHECK-NEXT: ret i64 [[A]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 3)
+ %b = and i64 %a, 131071
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8m8_and16bits() {
+; CHECK-LABEL: @vsetvlmax_e8m8_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 3)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf2_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf2_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf2_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf2_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf4_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf4_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf4_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf4_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf8_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf8_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e8mf8_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e8mf8_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m1_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e16m1_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m1_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e16m1_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m2_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e16m2_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m2_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e16m2_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m4_and15bits() {
+; CHECK-LABEL: @vsetvlmax_e16m4_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m4_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e16m4_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m8_and16bits() {
+; CHECK-LABEL: @vsetvlmax_e16m8_and16bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
+ %b = and i64 %a, 65535
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16m8_and15bits() {
+; CHECK-LABEL: @vsetvlmax_e16m8_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf2_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf2_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf2_and9bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf2_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf4_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf4_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf4_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf4_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf8_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf8_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e16mf8_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e16mf8_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m1_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e32m1_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m1_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e32m1_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m2_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e32m2_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m2_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e32m2_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m4_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e32m4_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m4_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e32m4_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m8_and15bits() {
+; CHECK-LABEL: @vsetvlmax_e32m8_and15bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
+ %b = and i64 %a, 32767
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32m8_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e32m8_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf2_and9bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf2_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf2_and8bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf2_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf4_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf4_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf4_and9bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf4_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf8_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf8_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e32mf8_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e32mf8_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m1_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e64m1_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m1_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e64m1_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m2_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e64m2_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m2_and11bits() {
+; CHECK-LABEL: @vsetvlmax_e64m2_and11bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
+ %b = and i64 %a, 2047
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m4_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e64m4_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m4_and12bits() {
+; CHECK-LABEL: @vsetvlmax_e64m4_and12bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
+ %b = and i64 %a, 4095
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m8_and14bits() {
+; CHECK-LABEL: @vsetvlmax_e64m8_and14bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
+ %b = and i64 %a, 16383
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64m8_and13bits() {
+; CHECK-LABEL: @vsetvlmax_e64m8_and13bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
+ %b = and i64 %a, 8191
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf2_and8bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf2_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf2_and7bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf2_and7bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 127
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
+ %b = and i64 %a, 127
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf4_and9bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf4_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf4_and8bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf4_and8bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
+ %b = and i64 %a, 255
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf8_and10bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf8_and10bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
+ %b = and i64 %a, 1023
+ ret i64 %b
+}
+
+define i64 @vsetvlmax_e64mf8_and9bits() {
+; CHECK-LABEL: @vsetvlmax_e64mf8_and9bits(
+; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
+; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
+; CHECK-NEXT: ret i64 [[B]]
+;
+ %a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
+ %b = and i64 %a, 511
+ ret i64 %b
+}
>From 1d5c36d3e87b596c9e578ad3ce6c36cffba6eb0c Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Sun, 18 Feb 2024 17:09:47 +0800
Subject: [PATCH 2/2] [RISCV] Take SEW/LMUL into account for value tracking of
vsetvli[max]
So that we can benefit from some instcombine optimizations.
---
llvm/lib/Analysis/ValueTracking.cpp | 29 ++-
.../RISCV/riscv-vsetvli-knownbits.ll | 168 ++++++------------
.../RISCV/riscv-vsetvlimax-knownbits.ll | 165 ++++++-----------
3 files changed, 135 insertions(+), 227 deletions(-)
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 1a076adb1bad0a..693ec80d831f0b 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -73,6 +73,7 @@
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/KnownBits.h"
#include "llvm/Support/MathExtras.h"
+#include "llvm/TargetParser/RISCVTargetParser.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
@@ -1576,12 +1577,30 @@ static void computeKnownBitsFromOperator(const Operator *I,
Known.Zero.setBitsFrom(32);
break;
case Intrinsic::riscv_vsetvli:
- case Intrinsic::riscv_vsetvlimax:
- // Assume that VL output is <= 65536.
- // TODO: Take SEW and LMUL into account.
- if (BitWidth > 17)
- Known.Zero.setBitsFrom(17);
+ case Intrinsic::riscv_vsetvlimax: {
+ bool HasAVL = II->getIntrinsicID() == Intrinsic::riscv_vsetvli;
+ const ConstantRange &Range =
+ getVScaleRange(II->getFunction(), BitWidth);
+ uint64_t SEW =
+ 1 << (cast<ConstantInt>(II->getArgOperand(HasAVL))->getZExtValue() +
+ 3);
+ uint64_t LMUL =
+ cast<ConstantInt>(II->getArgOperand(1 + HasAVL))->getZExtValue();
+ bool Fractional = LMUL > 4;
+      uint64_t MaxVL =
+          Range.getUnsignedMax().getZExtValue() * RISCV::RVVBitsPerBlock / SEW;
+ MaxVL = Fractional ? MaxVL / (1 << (8 - LMUL)) : MaxVL * (1 << LMUL);
+
+      // Result of vsetvli must not be larger than AVL.
+ if (HasAVL)
+ if (auto *CI = dyn_cast<ConstantInt>(II->getArgOperand(0)))
+ MaxVL = std::min(MaxVL, CI->getZExtValue());
+
+ unsigned KnownZeroFirstBit = Log2_32(MaxVL) + 1;
+ if (BitWidth > KnownZeroFirstBit)
+ Known.Zero.setBitsFrom(KnownZeroFirstBit);
break;
+ }
case Intrinsic::vscale: {
if (!II->getParent() || !II->getFunction())
break;
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
index 5e5fec9cab8913..d88db2832360d8 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll
@@ -69,8 +69,7 @@ entry:
define i64 @vsetvl_e8m1_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m1_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
%b = and i64 %a, 16383
@@ -80,8 +79,7 @@ define i64 @vsetvl_e8m1_and14bits(i64 %avl) {
define i64 @vsetvl_e8m1_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m1_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
%b = and i64 %a, 8191
@@ -91,8 +89,7 @@ define i64 @vsetvl_e8m1_and13bits(i64 %avl) {
define i64 @vsetvl_e8m1_constant_avl() {
; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
%b = and i64 %a, 1
@@ -102,8 +99,7 @@ define i64 @vsetvl_e8m1_constant_avl() {
define i64 @vsetvl_e8m2_and15bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m2_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
%b = and i64 %a, 32767
@@ -113,8 +109,7 @@ define i64 @vsetvl_e8m2_and15bits(i64 %avl) {
define i64 @vsetvl_e8m2_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m2_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
%b = and i64 %a, 16383
@@ -124,8 +119,7 @@ define i64 @vsetvl_e8m2_and14bits(i64 %avl) {
define i64 @vsetvl_e8m4_and16bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m4_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
%b = and i64 %a, 65535
@@ -135,8 +129,7 @@ define i64 @vsetvl_e8m4_and16bits(i64 %avl) {
define i64 @vsetvl_e8m4_and15bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m4_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
%b = and i64 %a, 32767
@@ -156,8 +149,7 @@ define i64 @vsetvl_e8m8_and17bits(i64 %avl) {
define i64 @vsetvl_e8m8_and16bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8m8_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
%b = and i64 %a, 65535
@@ -167,8 +159,7 @@ define i64 @vsetvl_e8m8_and16bits(i64 %avl) {
define i64 @vsetvl_e8mf2_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
%b = and i64 %a, 2047
@@ -178,8 +169,7 @@ define i64 @vsetvl_e8mf2_and11bits(i64 %avl) {
define i64 @vsetvl_e8mf2_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf2_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
%b = and i64 %a, 1023
@@ -189,8 +179,7 @@ define i64 @vsetvl_e8mf2_and10bits(i64 %avl) {
define i64 @vsetvl_e8mf4_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
%b = and i64 %a, 4095
@@ -200,8 +189,7 @@ define i64 @vsetvl_e8mf4_and12bits(i64 %avl) {
define i64 @vsetvl_e8mf4_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
%b = and i64 %a, 2047
@@ -211,8 +199,7 @@ define i64 @vsetvl_e8mf4_and11bits(i64 %avl) {
define i64 @vsetvl_e8mf8_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
%b = and i64 %a, 8191
@@ -222,8 +209,7 @@ define i64 @vsetvl_e8mf8_and13bits(i64 %avl) {
define i64 @vsetvl_e8mf8_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e8mf8_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
%b = and i64 %a, 4095
@@ -233,8 +219,7 @@ define i64 @vsetvl_e8mf8_and12bits(i64 %avl) {
define i64 @vsetvl_e16m1_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m1_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
%b = and i64 %a, 8191
@@ -244,8 +229,7 @@ define i64 @vsetvl_e16m1_and13bits(i64 %avl) {
define i64 @vsetvl_e16m1_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m1_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
%b = and i64 %a, 4095
@@ -255,8 +239,7 @@ define i64 @vsetvl_e16m1_and12bits(i64 %avl) {
define i64 @vsetvl_e16m2_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m2_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
%b = and i64 %a, 16383
@@ -266,8 +249,7 @@ define i64 @vsetvl_e16m2_and14bits(i64 %avl) {
define i64 @vsetvl_e16m2_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m2_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
%b = and i64 %a, 8191
@@ -277,8 +259,7 @@ define i64 @vsetvl_e16m2_and13bits(i64 %avl) {
define i64 @vsetvl_e16m4_and15bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m4_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
%b = and i64 %a, 32767
@@ -288,8 +269,7 @@ define i64 @vsetvl_e16m4_and15bits(i64 %avl) {
define i64 @vsetvl_e16m4_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m4_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
%b = and i64 %a, 16383
@@ -299,8 +279,7 @@ define i64 @vsetvl_e16m4_and14bits(i64 %avl) {
define i64 @vsetvl_e16m8_and16bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m8_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
%b = and i64 %a, 65535
@@ -310,8 +289,7 @@ define i64 @vsetvl_e16m8_and16bits(i64 %avl) {
define i64 @vsetvl_e16m8_and15bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16m8_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
%b = and i64 %a, 32767
@@ -321,8 +299,7 @@ define i64 @vsetvl_e16m8_and15bits(i64 %avl) {
define i64 @vsetvl_e16mf2_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
%b = and i64 %a, 1023
@@ -332,8 +309,7 @@ define i64 @vsetvl_e16mf2_and10bits(i64 %avl) {
define i64 @vsetvl_e16mf2_and9bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf2_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
%b = and i64 %a, 511
@@ -343,8 +319,7 @@ define i64 @vsetvl_e16mf2_and9bits(i64 %avl) {
define i64 @vsetvl_e16mf4_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
%b = and i64 %a, 2047
@@ -354,8 +329,7 @@ define i64 @vsetvl_e16mf4_and11bits(i64 %avl) {
define i64 @vsetvl_e16mf4_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
%b = and i64 %a, 1023
@@ -365,8 +339,7 @@ define i64 @vsetvl_e16mf4_and10bits(i64 %avl) {
define i64 @vsetvl_e16mf8_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
%b = and i64 %a, 4095
@@ -376,8 +349,7 @@ define i64 @vsetvl_e16mf8_and12bits(i64 %avl) {
define i64 @vsetvl_e16mf8_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e16mf8_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
%b = and i64 %a, 2047
@@ -387,8 +359,7 @@ define i64 @vsetvl_e16mf8_and11bits(i64 %avl) {
define i64 @vsetvl_e32m1_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m1_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
%b = and i64 %a, 4095
@@ -398,8 +369,7 @@ define i64 @vsetvl_e32m1_and12bits(i64 %avl) {
define i64 @vsetvl_e32m1_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m1_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
%b = and i64 %a, 2047
@@ -409,8 +379,7 @@ define i64 @vsetvl_e32m1_and11bits(i64 %avl) {
define i64 @vsetvl_e32m2_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m2_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
%b = and i64 %a, 8191
@@ -420,8 +389,7 @@ define i64 @vsetvl_e32m2_and13bits(i64 %avl) {
define i64 @vsetvl_e32m2_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m2_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
%b = and i64 %a, 4095
@@ -431,8 +399,7 @@ define i64 @vsetvl_e32m2_and12bits(i64 %avl) {
define i64 @vsetvl_e32m4_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m4_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
%b = and i64 %a, 16383
@@ -442,8 +409,7 @@ define i64 @vsetvl_e32m4_and14bits(i64 %avl) {
define i64 @vsetvl_e32m4_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m4_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
%b = and i64 %a, 8191
@@ -453,8 +419,7 @@ define i64 @vsetvl_e32m4_and13bits(i64 %avl) {
define i64 @vsetvl_e32m8_and15bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m8_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
%b = and i64 %a, 32767
@@ -464,8 +429,7 @@ define i64 @vsetvl_e32m8_and15bits(i64 %avl) {
define i64 @vsetvl_e32m8_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32m8_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
%b = and i64 %a, 16383
@@ -475,8 +439,7 @@ define i64 @vsetvl_e32m8_and14bits(i64 %avl) {
define i64 @vsetvl_e32mf2_and9bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
%b = and i64 %a, 511
@@ -486,8 +449,7 @@ define i64 @vsetvl_e32mf2_and9bits(i64 %avl) {
define i64 @vsetvl_e32mf2_and8bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf2_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
%b = and i64 %a, 255
@@ -497,8 +459,7 @@ define i64 @vsetvl_e32mf2_and8bits(i64 %avl) {
define i64 @vsetvl_e32mf4_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
%b = and i64 %a, 1023
@@ -508,8 +469,7 @@ define i64 @vsetvl_e32mf4_and10bits(i64 %avl) {
define i64 @vsetvl_e32mf4_and9bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
%b = and i64 %a, 511
@@ -519,8 +479,7 @@ define i64 @vsetvl_e32mf4_and9bits(i64 %avl) {
define i64 @vsetvl_e32mf8_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
%b = and i64 %a, 2047
@@ -530,8 +489,7 @@ define i64 @vsetvl_e32mf8_and11bits(i64 %avl) {
define i64 @vsetvl_e32mf8_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e32mf8_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
%b = and i64 %a, 1023
@@ -541,8 +499,7 @@ define i64 @vsetvl_e32mf8_and10bits(i64 %avl) {
define i64 @vsetvl_e64m1_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m1_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
%b = and i64 %a, 2047
@@ -552,8 +509,7 @@ define i64 @vsetvl_e64m1_and11bits(i64 %avl) {
define i64 @vsetvl_e64m1_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m1_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
%b = and i64 %a, 1023
@@ -563,8 +519,7 @@ define i64 @vsetvl_e64m1_and10bits(i64 %avl) {
define i64 @vsetvl_e64m2_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m2_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
%b = and i64 %a, 4095
@@ -574,8 +529,7 @@ define i64 @vsetvl_e64m2_and12bits(i64 %avl) {
define i64 @vsetvl_e64m2_and11bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m2_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
%b = and i64 %a, 2047
@@ -585,8 +539,7 @@ define i64 @vsetvl_e64m2_and11bits(i64 %avl) {
define i64 @vsetvl_e64m4_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m4_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
%b = and i64 %a, 8191
@@ -596,8 +549,7 @@ define i64 @vsetvl_e64m4_and13bits(i64 %avl) {
define i64 @vsetvl_e64m4_and12bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m4_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
%b = and i64 %a, 4095
@@ -607,8 +559,7 @@ define i64 @vsetvl_e64m4_and12bits(i64 %avl) {
define i64 @vsetvl_e64m8_and14bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m8_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
%b = and i64 %a, 16383
@@ -618,8 +569,7 @@ define i64 @vsetvl_e64m8_and14bits(i64 %avl) {
define i64 @vsetvl_e64m8_and13bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64m8_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
%b = and i64 %a, 8191
@@ -629,8 +579,7 @@ define i64 @vsetvl_e64m8_and13bits(i64 %avl) {
define i64 @vsetvl_e64mf2_and8bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
%b = and i64 %a, 255
@@ -640,8 +589,7 @@ define i64 @vsetvl_e64mf2_and8bits(i64 %avl) {
define i64 @vsetvl_e64mf2_and7bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf2_and7bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 127
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
%b = and i64 %a, 127
@@ -651,8 +599,7 @@ define i64 @vsetvl_e64mf2_and7bits(i64 %avl) {
define i64 @vsetvl_e64mf4_and9bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
%b = and i64 %a, 511
@@ -662,8 +609,7 @@ define i64 @vsetvl_e64mf4_and9bits(i64 %avl) {
define i64 @vsetvl_e64mf4_and8bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf4_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
%b = and i64 %a, 255
@@ -673,8 +619,7 @@ define i64 @vsetvl_e64mf4_and8bits(i64 %avl) {
define i64 @vsetvl_e64mf8_and10bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
%b = and i64 %a, 1023
@@ -684,8 +629,7 @@ define i64 @vsetvl_e64mf8_and10bits(i64 %avl) {
define i64 @vsetvl_e64mf8_and9bits(i64 %avl) {
; CHECK-LABEL: @vsetvl_e64mf8_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
%b = and i64 %a, 511
diff --git a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
index 11e8a9ec117fbe..fc564968df6930 100644
--- a/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
+++ b/llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvlimax-knownbits.ll
@@ -69,8 +69,7 @@ entry:
define i64 @vsetvlmax_e8m1_and14bits() {
; CHECK-LABEL: @vsetvlmax_e8m1_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
%b = and i64 %a, 16383
@@ -80,8 +79,7 @@ define i64 @vsetvlmax_e8m1_and14bits() {
define i64 @vsetvlmax_e8m1_and13bits() {
; CHECK-LABEL: @vsetvlmax_e8m1_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 0)
%b = and i64 %a, 8191
@@ -91,8 +89,7 @@ define i64 @vsetvlmax_e8m1_and13bits() {
define i64 @vsetvlmax_e8m2_and15bits() {
; CHECK-LABEL: @vsetvlmax_e8m2_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
%b = and i64 %a, 32767
@@ -102,8 +99,7 @@ define i64 @vsetvlmax_e8m2_and15bits() {
define i64 @vsetvlmax_e8m2_and14bits() {
; CHECK-LABEL: @vsetvlmax_e8m2_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 1)
%b = and i64 %a, 16383
@@ -113,8 +109,7 @@ define i64 @vsetvlmax_e8m2_and14bits() {
define i64 @vsetvlmax_e8m4_and16bits() {
; CHECK-LABEL: @vsetvlmax_e8m4_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
%b = and i64 %a, 65535
@@ -124,8 +119,7 @@ define i64 @vsetvlmax_e8m4_and16bits() {
define i64 @vsetvlmax_e8m4_and15bits() {
; CHECK-LABEL: @vsetvlmax_e8m4_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 2)
%b = and i64 %a, 32767
@@ -145,8 +139,7 @@ define i64 @vsetvlmax_e8m8_and17bits() {
define i64 @vsetvlmax_e8m8_and16bits() {
; CHECK-LABEL: @vsetvlmax_e8m8_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 3)
%b = and i64 %a, 65535
@@ -156,8 +149,7 @@ define i64 @vsetvlmax_e8m8_and16bits() {
define i64 @vsetvlmax_e8mf2_and11bits() {
; CHECK-LABEL: @vsetvlmax_e8mf2_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
%b = and i64 %a, 2047
@@ -167,8 +159,7 @@ define i64 @vsetvlmax_e8mf2_and11bits() {
define i64 @vsetvlmax_e8mf2_and10bits() {
; CHECK-LABEL: @vsetvlmax_e8mf2_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 5)
%b = and i64 %a, 1023
@@ -178,8 +169,7 @@ define i64 @vsetvlmax_e8mf2_and10bits() {
define i64 @vsetvlmax_e8mf4_and12bits() {
; CHECK-LABEL: @vsetvlmax_e8mf4_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
%b = and i64 %a, 4095
@@ -189,8 +179,7 @@ define i64 @vsetvlmax_e8mf4_and12bits() {
define i64 @vsetvlmax_e8mf4_and11bits() {
; CHECK-LABEL: @vsetvlmax_e8mf4_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 6)
%b = and i64 %a, 2047
@@ -200,8 +189,7 @@ define i64 @vsetvlmax_e8mf4_and11bits() {
define i64 @vsetvlmax_e8mf8_and13bits() {
; CHECK-LABEL: @vsetvlmax_e8mf8_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
%b = and i64 %a, 8191
@@ -211,8 +199,7 @@ define i64 @vsetvlmax_e8mf8_and13bits() {
define i64 @vsetvlmax_e8mf8_and12bits() {
; CHECK-LABEL: @vsetvlmax_e8mf8_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 0, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 0, i64 7)
%b = and i64 %a, 4095
@@ -222,8 +209,7 @@ define i64 @vsetvlmax_e8mf8_and12bits() {
define i64 @vsetvlmax_e16m1_and13bits() {
; CHECK-LABEL: @vsetvlmax_e16m1_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
%b = and i64 %a, 8191
@@ -233,8 +219,7 @@ define i64 @vsetvlmax_e16m1_and13bits() {
define i64 @vsetvlmax_e16m1_and12bits() {
; CHECK-LABEL: @vsetvlmax_e16m1_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 0)
%b = and i64 %a, 4095
@@ -244,8 +229,7 @@ define i64 @vsetvlmax_e16m1_and12bits() {
define i64 @vsetvlmax_e16m2_and14bits() {
; CHECK-LABEL: @vsetvlmax_e16m2_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
%b = and i64 %a, 16383
@@ -255,8 +239,7 @@ define i64 @vsetvlmax_e16m2_and14bits() {
define i64 @vsetvlmax_e16m2_and13bits() {
; CHECK-LABEL: @vsetvlmax_e16m2_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 1)
%b = and i64 %a, 8191
@@ -266,8 +249,7 @@ define i64 @vsetvlmax_e16m2_and13bits() {
define i64 @vsetvlmax_e16m4_and15bits() {
; CHECK-LABEL: @vsetvlmax_e16m4_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
%b = and i64 %a, 32767
@@ -277,8 +259,7 @@ define i64 @vsetvlmax_e16m4_and15bits() {
define i64 @vsetvlmax_e16m4_and14bits() {
; CHECK-LABEL: @vsetvlmax_e16m4_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 2)
%b = and i64 %a, 16383
@@ -288,8 +269,7 @@ define i64 @vsetvlmax_e16m4_and14bits() {
define i64 @vsetvlmax_e16m8_and16bits() {
; CHECK-LABEL: @vsetvlmax_e16m8_and16bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
%b = and i64 %a, 65535
@@ -299,8 +279,7 @@ define i64 @vsetvlmax_e16m8_and16bits() {
define i64 @vsetvlmax_e16m8_and15bits() {
; CHECK-LABEL: @vsetvlmax_e16m8_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 3)
%b = and i64 %a, 32767
@@ -310,8 +289,7 @@ define i64 @vsetvlmax_e16m8_and15bits() {
define i64 @vsetvlmax_e16mf2_and10bits() {
; CHECK-LABEL: @vsetvlmax_e16mf2_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
%b = and i64 %a, 1023
@@ -321,8 +299,7 @@ define i64 @vsetvlmax_e16mf2_and10bits() {
define i64 @vsetvlmax_e16mf2_and9bits() {
; CHECK-LABEL: @vsetvlmax_e16mf2_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 5)
%b = and i64 %a, 511
@@ -332,8 +309,7 @@ define i64 @vsetvlmax_e16mf2_and9bits() {
define i64 @vsetvlmax_e16mf4_and11bits() {
; CHECK-LABEL: @vsetvlmax_e16mf4_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
%b = and i64 %a, 2047
@@ -343,8 +319,7 @@ define i64 @vsetvlmax_e16mf4_and11bits() {
define i64 @vsetvlmax_e16mf4_and10bits() {
; CHECK-LABEL: @vsetvlmax_e16mf4_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 6)
%b = and i64 %a, 1023
@@ -354,8 +329,7 @@ define i64 @vsetvlmax_e16mf4_and10bits() {
define i64 @vsetvlmax_e16mf8_and12bits() {
; CHECK-LABEL: @vsetvlmax_e16mf8_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
%b = and i64 %a, 4095
@@ -365,8 +339,7 @@ define i64 @vsetvlmax_e16mf8_and12bits() {
define i64 @vsetvlmax_e16mf8_and11bits() {
; CHECK-LABEL: @vsetvlmax_e16mf8_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 1, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 1, i64 7)
%b = and i64 %a, 2047
@@ -376,8 +349,7 @@ define i64 @vsetvlmax_e16mf8_and11bits() {
define i64 @vsetvlmax_e32m1_and12bits() {
; CHECK-LABEL: @vsetvlmax_e32m1_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
%b = and i64 %a, 4095
@@ -387,8 +359,7 @@ define i64 @vsetvlmax_e32m1_and12bits() {
define i64 @vsetvlmax_e32m1_and11bits() {
; CHECK-LABEL: @vsetvlmax_e32m1_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 0)
%b = and i64 %a, 2047
@@ -398,8 +369,7 @@ define i64 @vsetvlmax_e32m1_and11bits() {
define i64 @vsetvlmax_e32m2_and13bits() {
; CHECK-LABEL: @vsetvlmax_e32m2_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
%b = and i64 %a, 8191
@@ -409,8 +379,7 @@ define i64 @vsetvlmax_e32m2_and13bits() {
define i64 @vsetvlmax_e32m2_and12bits() {
; CHECK-LABEL: @vsetvlmax_e32m2_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 1)
%b = and i64 %a, 4095
@@ -420,8 +389,7 @@ define i64 @vsetvlmax_e32m2_and12bits() {
define i64 @vsetvlmax_e32m4_and14bits() {
; CHECK-LABEL: @vsetvlmax_e32m4_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
%b = and i64 %a, 16383
@@ -431,8 +399,7 @@ define i64 @vsetvlmax_e32m4_and14bits() {
define i64 @vsetvlmax_e32m4_and13bits() {
; CHECK-LABEL: @vsetvlmax_e32m4_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 2)
%b = and i64 %a, 8191
@@ -442,8 +409,7 @@ define i64 @vsetvlmax_e32m4_and13bits() {
define i64 @vsetvlmax_e32m8_and15bits() {
; CHECK-LABEL: @vsetvlmax_e32m8_and15bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
%b = and i64 %a, 32767
@@ -453,8 +419,7 @@ define i64 @vsetvlmax_e32m8_and15bits() {
define i64 @vsetvlmax_e32m8_and14bits() {
; CHECK-LABEL: @vsetvlmax_e32m8_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 3)
%b = and i64 %a, 16383
@@ -464,8 +429,7 @@ define i64 @vsetvlmax_e32m8_and14bits() {
define i64 @vsetvlmax_e32mf2_and9bits() {
; CHECK-LABEL: @vsetvlmax_e32mf2_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
%b = and i64 %a, 511
@@ -475,8 +439,7 @@ define i64 @vsetvlmax_e32mf2_and9bits() {
define i64 @vsetvlmax_e32mf2_and8bits() {
; CHECK-LABEL: @vsetvlmax_e32mf2_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 5)
%b = and i64 %a, 255
@@ -486,8 +449,7 @@ define i64 @vsetvlmax_e32mf2_and8bits() {
define i64 @vsetvlmax_e32mf4_and10bits() {
; CHECK-LABEL: @vsetvlmax_e32mf4_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
%b = and i64 %a, 1023
@@ -497,8 +459,7 @@ define i64 @vsetvlmax_e32mf4_and10bits() {
define i64 @vsetvlmax_e32mf4_and9bits() {
; CHECK-LABEL: @vsetvlmax_e32mf4_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 6)
%b = and i64 %a, 511
@@ -508,8 +469,7 @@ define i64 @vsetvlmax_e32mf4_and9bits() {
define i64 @vsetvlmax_e32mf8_and11bits() {
; CHECK-LABEL: @vsetvlmax_e32mf8_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
%b = and i64 %a, 2047
@@ -519,8 +479,7 @@ define i64 @vsetvlmax_e32mf8_and11bits() {
define i64 @vsetvlmax_e32mf8_and10bits() {
; CHECK-LABEL: @vsetvlmax_e32mf8_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 2, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 2, i64 7)
%b = and i64 %a, 1023
@@ -530,8 +489,7 @@ define i64 @vsetvlmax_e32mf8_and10bits() {
define i64 @vsetvlmax_e64m1_and11bits() {
; CHECK-LABEL: @vsetvlmax_e64m1_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
%b = and i64 %a, 2047
@@ -541,8 +499,7 @@ define i64 @vsetvlmax_e64m1_and11bits() {
define i64 @vsetvlmax_e64m1_and10bits() {
; CHECK-LABEL: @vsetvlmax_e64m1_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 0)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 0)
%b = and i64 %a, 1023
@@ -552,8 +509,7 @@ define i64 @vsetvlmax_e64m1_and10bits() {
define i64 @vsetvlmax_e64m2_and12bits() {
; CHECK-LABEL: @vsetvlmax_e64m2_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
%b = and i64 %a, 4095
@@ -563,8 +519,7 @@ define i64 @vsetvlmax_e64m2_and12bits() {
define i64 @vsetvlmax_e64m2_and11bits() {
; CHECK-LABEL: @vsetvlmax_e64m2_and11bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 1)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 1)
%b = and i64 %a, 2047
@@ -574,8 +529,7 @@ define i64 @vsetvlmax_e64m2_and11bits() {
define i64 @vsetvlmax_e64m4_and13bits() {
; CHECK-LABEL: @vsetvlmax_e64m4_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
%b = and i64 %a, 8191
@@ -585,8 +539,7 @@ define i64 @vsetvlmax_e64m4_and13bits() {
define i64 @vsetvlmax_e64m4_and12bits() {
; CHECK-LABEL: @vsetvlmax_e64m4_and12bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 2)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 2)
%b = and i64 %a, 4095
@@ -596,8 +549,7 @@ define i64 @vsetvlmax_e64m4_and12bits() {
define i64 @vsetvlmax_e64m8_and14bits() {
; CHECK-LABEL: @vsetvlmax_e64m8_and14bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
%b = and i64 %a, 16383
@@ -607,8 +559,7 @@ define i64 @vsetvlmax_e64m8_and14bits() {
define i64 @vsetvlmax_e64m8_and13bits() {
; CHECK-LABEL: @vsetvlmax_e64m8_and13bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 3)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 [[A]]
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 3)
%b = and i64 %a, 8191
@@ -618,8 +569,7 @@ define i64 @vsetvlmax_e64m8_and13bits() {
define i64 @vsetvlmax_e64mf2_and8bits() {
; CHECK-LABEL: @vsetvlmax_e64mf2_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
%b = and i64 %a, 255
@@ -629,8 +579,7 @@ define i64 @vsetvlmax_e64mf2_and8bits() {
define i64 @vsetvlmax_e64mf2_and7bits() {
; CHECK-LABEL: @vsetvlmax_e64mf2_and7bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 5)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 127
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 5)
%b = and i64 %a, 127
@@ -640,8 +589,7 @@ define i64 @vsetvlmax_e64mf2_and7bits() {
define i64 @vsetvlmax_e64mf4_and9bits() {
; CHECK-LABEL: @vsetvlmax_e64mf4_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
%b = and i64 %a, 511
@@ -651,8 +599,7 @@ define i64 @vsetvlmax_e64mf4_and9bits() {
define i64 @vsetvlmax_e64mf4_and8bits() {
; CHECK-LABEL: @vsetvlmax_e64mf4_and8bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 6)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 6)
%b = and i64 %a, 255
@@ -662,8 +609,7 @@ define i64 @vsetvlmax_e64mf4_and8bits() {
define i64 @vsetvlmax_e64mf8_and10bits() {
; CHECK-LABEL: @vsetvlmax_e64mf8_and10bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
%b = and i64 %a, 1023
@@ -673,8 +619,7 @@ define i64 @vsetvlmax_e64mf8_and10bits() {
define i64 @vsetvlmax_e64mf8_and9bits() {
; CHECK-LABEL: @vsetvlmax_e64mf8_and9bits(
; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvlimax.i64(i64 3, i64 7)
-; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
-; CHECK-NEXT: ret i64 [[B]]
+; CHECK-NEXT: ret i64 0
;
%a = call i64 @llvm.riscv.vsetvlimax(i64 3, i64 7)
%b = and i64 %a, 511
More information about the llvm-commits
mailing list