[llvm] 5cdd204 - [RISCV] Remove -riscv-v-vector-bits-max from reverse tests. NFC

Luke Lau via llvm-commits <llvm-commits at lists.llvm.org>
Fri Aug 16 00:49:28 PDT 2024


Author: Luke Lau
Date: 2024-08-16T15:49:08+08:00
New Revision: 5cdd2042a01881decd71fbfb7a465e4927a854a3

URL: https://github.com/llvm/llvm-project/commit/5cdd2042a01881decd71fbfb7a465e4927a854a3
DIFF: https://github.com/llvm/llvm-project/commit/5cdd2042a01881decd71fbfb7a465e4927a854a3.diff

LOG: [RISCV] Remove -riscv-v-vector-bits-max from reverse tests. NFC

The generated output no longer seems to differ between the RUN lines with
and without -riscv-v-vector-bits-max, so the extra configurations are redundant.
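
One way to sanity-check this claim (a sketch only, not necessarily the
commands used for this commit; it assumes a built llc on PATH and a standard
llvm-project checkout, and the output file names are illustrative):

    # Compile the test once without the flag and once with a 256-bit cap,
    # then confirm the generated assembly is identical.
    llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh \
        llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll -o unconstrained.s
    llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -riscv-v-vector-bits-max=256 \
        llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll -o max256.s
    diff unconstrained.s max256.s    # no output means the flag made no difference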

Added: 
    

Modified: 
    llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll
index d6e66577fa97a7..bab2e9ba196374 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll
@@ -1,10 +1,6 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV32-BITS-UNKNOWN
-; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh -riscv-v-vector-bits-max=256 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV32-BITS-256
-; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh -riscv-v-vector-bits-max=512 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV32-BITS-512
-; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV64-BITS-UNKNOWN
-; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -riscv-v-vector-bits-max=256 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV64-BITS-256
-; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -riscv-v-vector-bits-max=512 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV64-BITS-512
+; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV32
+; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,NO-ZVBB,RV64
 ; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+zvbb -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVBB,RV32-ZVBB
 ; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+zvbb -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVBB,RV64-ZVBB
 
@@ -648,65 +644,25 @@ define <8 x double> @reverse_v8f64(<8 x double> %a) {
 
 
 define <3 x i64> @reverse_v3i64(<3 x i64> %a) {
-; RV32-BITS-UNKNOWN-LABEL: reverse_v3i64:
-; RV32-BITS-UNKNOWN:       # %bb.0:
-; RV32-BITS-UNKNOWN-NEXT:    lui a0, %hi(.LCPI44_0)
-; RV32-BITS-UNKNOWN-NEXT:    addi a0, a0, %lo(.LCPI44_0)
-; RV32-BITS-UNKNOWN-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-BITS-UNKNOWN-NEXT:    vle16.v v12, (a0)
-; RV32-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV32-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v10
-; RV32-BITS-UNKNOWN-NEXT:    ret
-;
-; RV32-BITS-256-LABEL: reverse_v3i64:
-; RV32-BITS-256:       # %bb.0:
-; RV32-BITS-256-NEXT:    lui a0, %hi(.LCPI44_0)
-; RV32-BITS-256-NEXT:    addi a0, a0, %lo(.LCPI44_0)
-; RV32-BITS-256-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-BITS-256-NEXT:    vle16.v v12, (a0)
-; RV32-BITS-256-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV32-BITS-256-NEXT:    vmv.v.v v8, v10
-; RV32-BITS-256-NEXT:    ret
-;
-; RV32-BITS-512-LABEL: reverse_v3i64:
-; RV32-BITS-512:       # %bb.0:
-; RV32-BITS-512-NEXT:    lui a0, %hi(.LCPI44_0)
-; RV32-BITS-512-NEXT:    addi a0, a0, %lo(.LCPI44_0)
-; RV32-BITS-512-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-BITS-512-NEXT:    vle16.v v12, (a0)
-; RV32-BITS-512-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV32-BITS-512-NEXT:    vmv.v.v v8, v10
-; RV32-BITS-512-NEXT:    ret
-;
-; RV64-BITS-UNKNOWN-LABEL: reverse_v3i64:
-; RV64-BITS-UNKNOWN:       # %bb.0:
-; RV64-BITS-UNKNOWN-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vid.v v10
-; RV64-BITS-UNKNOWN-NEXT:    vrsub.vi v12, v10, 2
-; RV64-BITS-UNKNOWN-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV64-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v10
-; RV64-BITS-UNKNOWN-NEXT:    ret
-;
-; RV64-BITS-256-LABEL: reverse_v3i64:
-; RV64-BITS-256:       # %bb.0:
-; RV64-BITS-256-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-BITS-256-NEXT:    vid.v v10
-; RV64-BITS-256-NEXT:    vrsub.vi v12, v10, 2
-; RV64-BITS-256-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
-; RV64-BITS-256-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV64-BITS-256-NEXT:    vmv.v.v v8, v10
-; RV64-BITS-256-NEXT:    ret
-;
-; RV64-BITS-512-LABEL: reverse_v3i64:
-; RV64-BITS-512:       # %bb.0:
-; RV64-BITS-512-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-BITS-512-NEXT:    vid.v v10
-; RV64-BITS-512-NEXT:    vrsub.vi v12, v10, 2
-; RV64-BITS-512-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
-; RV64-BITS-512-NEXT:    vrgatherei16.vv v10, v8, v12
-; RV64-BITS-512-NEXT:    vmv.v.v v8, v10
-; RV64-BITS-512-NEXT:    ret
+; RV32-LABEL: reverse_v3i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    lui a0, %hi(.LCPI44_0)
+; RV32-NEXT:    addi a0, a0, %lo(.LCPI44_0)
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    vle16.v v12, (a0)
+; RV32-NEXT:    vrgatherei16.vv v10, v8, v12
+; RV32-NEXT:    vmv.v.v v8, v10
+; RV32-NEXT:    ret
+;
+; RV64-LABEL: reverse_v3i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; RV64-NEXT:    vid.v v10
+; RV64-NEXT:    vrsub.vi v12, v10, 2
+; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, ma
+; RV64-NEXT:    vrgatherei16.vv v10, v8, v12
+; RV64-NEXT:    vmv.v.v v8, v10
+; RV64-NEXT:    ret
 ;
 ; RV32-ZVBB-LABEL: reverse_v3i64:
 ; RV32-ZVBB:       # %bb.0:
@@ -732,65 +688,25 @@ define <3 x i64> @reverse_v3i64(<3 x i64> %a) {
 }
 
 define <6 x i64> @reverse_v6i64(<6 x i64> %a) {
-; RV32-BITS-UNKNOWN-LABEL: reverse_v6i64:
-; RV32-BITS-UNKNOWN:       # %bb.0:
-; RV32-BITS-UNKNOWN-NEXT:    lui a0, %hi(.LCPI45_0)
-; RV32-BITS-UNKNOWN-NEXT:    addi a0, a0, %lo(.LCPI45_0)
-; RV32-BITS-UNKNOWN-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-BITS-UNKNOWN-NEXT:    vle16.v v16, (a0)
-; RV32-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV32-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v12
-; RV32-BITS-UNKNOWN-NEXT:    ret
-;
-; RV32-BITS-256-LABEL: reverse_v6i64:
-; RV32-BITS-256:       # %bb.0:
-; RV32-BITS-256-NEXT:    lui a0, %hi(.LCPI45_0)
-; RV32-BITS-256-NEXT:    addi a0, a0, %lo(.LCPI45_0)
-; RV32-BITS-256-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-BITS-256-NEXT:    vle16.v v16, (a0)
-; RV32-BITS-256-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV32-BITS-256-NEXT:    vmv.v.v v8, v12
-; RV32-BITS-256-NEXT:    ret
-;
-; RV32-BITS-512-LABEL: reverse_v6i64:
-; RV32-BITS-512:       # %bb.0:
-; RV32-BITS-512-NEXT:    lui a0, %hi(.LCPI45_0)
-; RV32-BITS-512-NEXT:    addi a0, a0, %lo(.LCPI45_0)
-; RV32-BITS-512-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-BITS-512-NEXT:    vle16.v v16, (a0)
-; RV32-BITS-512-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV32-BITS-512-NEXT:    vmv.v.v v8, v12
-; RV32-BITS-512-NEXT:    ret
-;
-; RV64-BITS-UNKNOWN-LABEL: reverse_v6i64:
-; RV64-BITS-UNKNOWN:       # %bb.0:
-; RV64-BITS-UNKNOWN-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vid.v v12
-; RV64-BITS-UNKNOWN-NEXT:    vrsub.vi v16, v12, 5
-; RV64-BITS-UNKNOWN-NEXT:    vsetvli zero, zero, e64, m4, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV64-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v12
-; RV64-BITS-UNKNOWN-NEXT:    ret
-;
-; RV64-BITS-256-LABEL: reverse_v6i64:
-; RV64-BITS-256:       # %bb.0:
-; RV64-BITS-256-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-BITS-256-NEXT:    vid.v v12
-; RV64-BITS-256-NEXT:    vrsub.vi v16, v12, 5
-; RV64-BITS-256-NEXT:    vsetvli zero, zero, e64, m4, ta, ma
-; RV64-BITS-256-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV64-BITS-256-NEXT:    vmv.v.v v8, v12
-; RV64-BITS-256-NEXT:    ret
-;
-; RV64-BITS-512-LABEL: reverse_v6i64:
-; RV64-BITS-512:       # %bb.0:
-; RV64-BITS-512-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-BITS-512-NEXT:    vid.v v12
-; RV64-BITS-512-NEXT:    vrsub.vi v16, v12, 5
-; RV64-BITS-512-NEXT:    vsetvli zero, zero, e64, m4, ta, ma
-; RV64-BITS-512-NEXT:    vrgatherei16.vv v12, v8, v16
-; RV64-BITS-512-NEXT:    vmv.v.v v8, v12
-; RV64-BITS-512-NEXT:    ret
+; RV32-LABEL: reverse_v6i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    lui a0, %hi(.LCPI45_0)
+; RV32-NEXT:    addi a0, a0, %lo(.LCPI45_0)
+; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV32-NEXT:    vle16.v v16, (a0)
+; RV32-NEXT:    vrgatherei16.vv v12, v8, v16
+; RV32-NEXT:    vmv.v.v v8, v12
+; RV32-NEXT:    ret
+;
+; RV64-LABEL: reverse_v6i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; RV64-NEXT:    vid.v v12
+; RV64-NEXT:    vrsub.vi v16, v12, 5
+; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, ma
+; RV64-NEXT:    vrgatherei16.vv v12, v8, v16
+; RV64-NEXT:    vmv.v.v v8, v12
+; RV64-NEXT:    ret
 ;
 ; RV32-ZVBB-LABEL: reverse_v6i64:
 ; RV32-ZVBB:       # %bb.0:
@@ -816,68 +732,26 @@ define <6 x i64> @reverse_v6i64(<6 x i64> %a) {
 }
 
 define <12 x i64> @reverse_v12i64(<12 x i64> %a) {
-; RV32-BITS-UNKNOWN-LABEL: reverse_v12i64:
-; RV32-BITS-UNKNOWN:       # %bb.0:
-; RV32-BITS-UNKNOWN-NEXT:    li a0, 32
-; RV32-BITS-UNKNOWN-NEXT:    lui a1, %hi(.LCPI46_0)
-; RV32-BITS-UNKNOWN-NEXT:    addi a1, a1, %lo(.LCPI46_0)
-; RV32-BITS-UNKNOWN-NEXT:    vsetvli zero, a0, e32, m8, ta, ma
-; RV32-BITS-UNKNOWN-NEXT:    vle16.v v24, (a1)
-; RV32-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV32-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v16
-; RV32-BITS-UNKNOWN-NEXT:    ret
-;
-; RV32-BITS-256-LABEL: reverse_v12i64:
-; RV32-BITS-256:       # %bb.0:
-; RV32-BITS-256-NEXT:    li a0, 32
-; RV32-BITS-256-NEXT:    lui a1, %hi(.LCPI46_0)
-; RV32-BITS-256-NEXT:    addi a1, a1, %lo(.LCPI46_0)
-; RV32-BITS-256-NEXT:    vsetvli zero, a0, e32, m8, ta, ma
-; RV32-BITS-256-NEXT:    vle16.v v24, (a1)
-; RV32-BITS-256-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV32-BITS-256-NEXT:    vmv.v.v v8, v16
-; RV32-BITS-256-NEXT:    ret
-;
-; RV32-BITS-512-LABEL: reverse_v12i64:
-; RV32-BITS-512:       # %bb.0:
-; RV32-BITS-512-NEXT:    li a0, 32
-; RV32-BITS-512-NEXT:    lui a1, %hi(.LCPI46_0)
-; RV32-BITS-512-NEXT:    addi a1, a1, %lo(.LCPI46_0)
-; RV32-BITS-512-NEXT:    vsetvli zero, a0, e32, m8, ta, ma
-; RV32-BITS-512-NEXT:    vle16.v v24, (a1)
-; RV32-BITS-512-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV32-BITS-512-NEXT:    vmv.v.v v8, v16
-; RV32-BITS-512-NEXT:    ret
-;
-; RV64-BITS-UNKNOWN-LABEL: reverse_v12i64:
-; RV64-BITS-UNKNOWN:       # %bb.0:
-; RV64-BITS-UNKNOWN-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vid.v v16
-; RV64-BITS-UNKNOWN-NEXT:    vrsub.vi v24, v16, 11
-; RV64-BITS-UNKNOWN-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-BITS-UNKNOWN-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV64-BITS-UNKNOWN-NEXT:    vmv.v.v v8, v16
-; RV64-BITS-UNKNOWN-NEXT:    ret
-;
-; RV64-BITS-256-LABEL: reverse_v12i64:
-; RV64-BITS-256:       # %bb.0:
-; RV64-BITS-256-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV64-BITS-256-NEXT:    vid.v v16
-; RV64-BITS-256-NEXT:    vrsub.vi v24, v16, 11
-; RV64-BITS-256-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-BITS-256-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV64-BITS-256-NEXT:    vmv.v.v v8, v16
-; RV64-BITS-256-NEXT:    ret
-;
-; RV64-BITS-512-LABEL: reverse_v12i64:
-; RV64-BITS-512:       # %bb.0:
-; RV64-BITS-512-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV64-BITS-512-NEXT:    vid.v v16
-; RV64-BITS-512-NEXT:    vrsub.vi v24, v16, 11
-; RV64-BITS-512-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-BITS-512-NEXT:    vrgatherei16.vv v16, v8, v24
-; RV64-BITS-512-NEXT:    vmv.v.v v8, v16
-; RV64-BITS-512-NEXT:    ret
+; RV32-LABEL: reverse_v12i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a0, 32
+; RV32-NEXT:    lui a1, %hi(.LCPI46_0)
+; RV32-NEXT:    addi a1, a1, %lo(.LCPI46_0)
+; RV32-NEXT:    vsetvli zero, a0, e32, m8, ta, ma
+; RV32-NEXT:    vle16.v v24, (a1)
+; RV32-NEXT:    vrgatherei16.vv v16, v8, v24
+; RV32-NEXT:    vmv.v.v v8, v16
+; RV32-NEXT:    ret
+;
+; RV64-LABEL: reverse_v12i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; RV64-NEXT:    vid.v v16
+; RV64-NEXT:    vrsub.vi v24, v16, 11
+; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
+; RV64-NEXT:    vrgatherei16.vv v16, v8, v24
+; RV64-NEXT:    vmv.v.v v8, v16
+; RV64-NEXT:    ret
 ;
 ; RV32-ZVBB-LABEL: reverse_v12i64:
 ; RV32-ZVBB:       # %bb.0:
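
Per the NOTE line at the top of the test, the check lines are autogenerated
with utils/update_llc_test_checks.py, so the collapsed RV32/RV64 prefixes
above were presumably regenerated with that script after editing the RUN
lines. A sketch of such an invocation (assuming a built llc at build/bin/llc;
the exact command used for this commit is not shown):

    # Regenerate the CHECK lines for the modified test.
    llvm/utils/update_llc_test_checks.py --llc-binary build/bin/llc \
        llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-reverse.ll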

