[llvm] dbd884c - [RISCV][GISEL] Add vector RegisterBanks and vector support in getRegBankFromRegClass

Michael Maitland via llvm-commits llvm-commits at lists.llvm.org
Wed Nov 15 15:09:20 PST 2023


Author: Michael Maitland
Date: 2023-11-15T15:08:29-08:00
New Revision: dbd884cd3da720b0adbd52142f1426745ab9d181

URL: https://github.com/llvm/llvm-project/commit/dbd884cd3da720b0adbd52142f1426745ab9d181
DIFF: https://github.com/llvm/llvm-project/commit/dbd884cd3da720b0adbd52142f1426745ab9d181.diff

LOG: [RISCV][GISEL] Add vector RegisterBanks and vector support in getRegBankFromRegClass

Vector register banks are created for the various vector register
groupings. getRegBankFromRegClass is implemented to map from a
vector TargetRegisterClass to the corresponding vector RegisterBank.

Added: 
    llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/vec-args-ret.mir

Modified: 
    llvm/lib/CodeGen/MachineVerifier.cpp
    llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
    llvm/lib/Target/RISCV/GISel/RISCVRegisterBanks.td

Removed: 
    


################################################################################
diff  --git a/llvm/lib/CodeGen/MachineVerifier.cpp b/llvm/lib/CodeGen/MachineVerifier.cpp
index 417d9b359591153..729dfc67491ee14 100644
--- a/llvm/lib/CodeGen/MachineVerifier.cpp
+++ b/llvm/lib/CodeGen/MachineVerifier.cpp
@@ -1966,9 +1966,6 @@ void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
     if (SrcReg.isVirtual() && DstReg.isPhysical() && SrcSize.isScalable() &&
         !DstSize.isScalable())
       break;
-    if (SrcReg.isVirtual() && DstReg.isPhysical() && SrcSize.isScalable() &&
-        !DstSize.isScalable())
-      break;
 
     if (SrcSize.isNonZero() && DstSize.isNonZero() && SrcSize != DstSize) {
       if (!DstOp.getSubReg() && !SrcOp.getSubReg()) {

diff  --git a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
index 8c3f9c3cd2e0feb..2899d9f4412809a 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
+++ b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBankInfo.cpp
@@ -100,6 +100,20 @@ RISCVRegisterBankInfo::getRegBankFromRegClass(const TargetRegisterClass &RC,
   case RISCV::FPR64CRegClassID:
   case RISCV::FPR32CRegClassID:
     return getRegBank(RISCV::FPRBRegBankID);
+  case RISCV::VMRegClassID:
+  case RISCV::VRRegClassID:
+  case RISCV::VRNoV0RegClassID:
+  case RISCV::VRM2RegClassID:
+  case RISCV::VRM2NoV0RegClassID:
+  case RISCV::VRM4RegClassID:
+  case RISCV::VRM4NoV0RegClassID:
+  case RISCV::VMV0RegClassID:
+  case RISCV::VRM2_with_sub_vrm1_0_in_VMV0RegClassID:
+  case RISCV::VRM4_with_sub_vrm1_0_in_VMV0RegClassID:
+  case RISCV::VRM8RegClassID:
+  case RISCV::VRM8NoV0RegClassID:
+  case RISCV::VRM8_with_sub_vrm1_0_in_VMV0RegClassID:
+    return getRegBank(RISCV::VRBRegBankID);
   }
 }
 

diff  --git a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBanks.td b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBanks.td
index b0556ec44e8f3dd..b1ef815fe373521 100644
--- a/llvm/lib/Target/RISCV/GISel/RISCVRegisterBanks.td
+++ b/llvm/lib/Target/RISCV/GISel/RISCVRegisterBanks.td
@@ -14,3 +14,7 @@ def GPRBRegBank : RegisterBank<"GPRB", [GPR]>;
 
 /// Floating Point Registers: F.
 def FPRBRegBank : RegisterBank<"FPRB", [FPR64]>;
+
+/// Vector Registers : V.
+def VRBRegBank : RegisterBank<"VRB", [VRM8]>;
+

diff  --git a/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/vec-args-ret.mir b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/vec-args-ret.mir
new file mode 100644
index 000000000000000..e20d5f7fbb74fb7
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/regbankselect/vec-args-ret.mir
@@ -0,0 +1,1259 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=riscv32  -run-pass=regbankselect \
+# RUN:   -disable-gisel-legality-check -simplify-mir -verify-machineinstrs %s \
+# RUN:   -o - | FileCheck -check-prefixes=RV32 %s
+# RUN: llc -mtriple=riscv64  -run-pass=regbankselect \
+# RUN:   -disable-gisel-legality-check -simplify-mir -verify-machineinstrs %s \
+# RUN:   -o - | FileCheck -check-prefixes=RV64 %s
+
+...
+---
+name:            test_ret_nxv1i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s8>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s8>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s8>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s8>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 4 x s8>) = COPY $v8
+    $v8 = COPY %0(<vscale x 4 x s8>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv8i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 8 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 8 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 8 x s8>) = COPY $v8
+    $v8 = COPY %0(<vscale x 8 x s8>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv16i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 16 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 16 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 16 x s8>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 16 x s8>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv32i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv32i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 32 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv32i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 32 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 32 x s8>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 32 x s8>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv64i8
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv64i8
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 64 x s8>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 64 x s8>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv64i8
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 64 x s8>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 64 x s8>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 64 x s8>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 64 x s8>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 4 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 4 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv8i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 8 x s16>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 8 x s16>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv16i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 16 x s16>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 16 x s16>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv32i16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv32i16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv32i16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 32 x s16>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 32 x s16>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1i32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1i32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1i32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s32>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s32>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2i32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2i32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2i32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s32>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s32>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4i32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4i32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 4 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4i32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 4 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 4 x s32>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 4 x s32>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv8i32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8i32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 8 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8i32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 8 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 8 x s32>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 8 x s32>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv16i32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16i32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 16 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16i32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 16 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 16 x s32>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 16 x s32>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1i64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1i64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1i64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s64>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s64>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2i64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2i64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 2 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2i64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 2 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 2 x s64>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 2 x s64>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv4i64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4i64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 4 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4i64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 4 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 4 x s64>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 4 x s64>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv8i64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8i64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 8 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8i64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 8 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 8 x s64>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 8 x s64>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv64i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv64i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 64 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 64 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv64i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 64 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 64 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 64 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 64 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv32i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv32i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 32 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv32i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 32 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 32 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 32 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv16i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 16 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 16 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 16 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 16 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv8i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 8 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 8 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 8 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 8 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 4 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 4 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv1i1
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1i1
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s1>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s1>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1i1
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s1>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s1>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s1>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s1>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv1f32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1f32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1f32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s32>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s32>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2f32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2f32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2f32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s32>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s32>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4f32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4f32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 4 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4f32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 4 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 4 x s32>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 4 x s32>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv8f32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8f32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 8 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8f32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 8 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 8 x s32>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 8 x s32>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv16f32
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16f32
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s32>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 16 x s32>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16f32
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s32>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 16 x s32>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 16 x s32>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 16 x s32>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1f64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1f64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1f64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s64>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s64>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2f64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2f64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 2 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2f64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 2 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 2 x s64>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 2 x s64>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv4f64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4f64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 4 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4f64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 4 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 4 x s64>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 4 x s64>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv8f64
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8f64
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s64>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 8 x s64>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8f64
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s64>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 8 x s64>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 8 x s64>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 8 x s64>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 4 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 4 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv8f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 8 x s16>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 8 x s16>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv16f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 16 x s16>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 16 x s16>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv32f16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv32f16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv32f16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 32 x s16>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 32 x s16>)
+    PseudoRET implicit $v8m8
+
+...
+---
+name:            test_ret_nxv1b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv1b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv1b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 1 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 1 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 1 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv2b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv2b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv2b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 2 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 2 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 2 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv4b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv4b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv4b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8 = COPY [[COPY]](<vscale x 4 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8
+    %0:_(<vscale x 4 x s16>) = COPY $v8
+    $v8 = COPY %0(<vscale x 4 x s16>)
+    PseudoRET implicit $v8
+
+...
+---
+name:            test_ret_nxv8b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv8b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m2
+    ;
+    ; RV64-LABEL: name: test_ret_nxv8b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m2 = COPY [[COPY]](<vscale x 8 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m2
+    %0:_(<vscale x 8 x s16>) = COPY $v8
+    $v8m2 = COPY %0(<vscale x 8 x s16>)
+    PseudoRET implicit $v8m2
+
+...
+---
+name:            test_ret_nxv16b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv16b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m4
+    ;
+    ; RV64-LABEL: name: test_ret_nxv16b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m4 = COPY [[COPY]](<vscale x 16 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m4
+    %0:_(<vscale x 16 x s16>) = COPY $v8
+    $v8m4 = COPY %0(<vscale x 16 x s16>)
+    PseudoRET implicit $v8m4
+
+...
+---
+name:            test_ret_nxv32b16
+legalized:         true
+tracksRegLiveness: true
+body:             |
+  bb.1.entry:
+    liveins: $v8
+    ; RV32-LABEL: name: test_ret_nxv32b16
+    ; RV32: liveins: $v8
+    ; RV32-NEXT: {{  $}}
+    ; RV32-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV32-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV32-NEXT: PseudoRET implicit $v8m8
+    ;
+    ; RV64-LABEL: name: test_ret_nxv32b16
+    ; RV64: liveins: $v8
+    ; RV64-NEXT: {{  $}}
+    ; RV64-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s16>) = COPY $v8
+    ; RV64-NEXT: $v8m8 = COPY [[COPY]](<vscale x 32 x s16>)
+    ; RV64-NEXT: PseudoRET implicit $v8m8
+    %0:_(<vscale x 32 x s16>) = COPY $v8
+    $v8m8 = COPY %0(<vscale x 32 x s16>)
+    PseudoRET implicit $v8m8
+
+...


        


More information about the llvm-commits mailing list