[llvm] 772b58a - [SelectionDAG][RISCV] Don't unroll 0/1-type bool VSELECTs

Fraser Cormack via llvm-commits llvm-commits at lists.llvm.org
Thu May 27 02:17:02 PDT 2021


Author: Fraser Cormack
Date: 2021-05-27T10:08:57+01:00
New Revision: 772b58a641affcd786e3062250a0f51acb8b1553

URL: https://github.com/llvm/llvm-project/commit/772b58a641affcd786e3062250a0f51acb8b1553
DIFF: https://github.com/llvm/llvm-project/commit/772b58a641affcd786e3062250a0f51acb8b1553.diff

LOG: [SelectionDAG][RISCV] Don't unroll 0/1-type bool VSELECTs

This patch extends the cases in which the legalizer is able to express
VSELECT in terms of XOR/AND/OR. When dealing with a VSELECT between
boolean vector types, the mask itself is an all-ones or all-zeros value
of the operand type, so a 0/1 boolean type behaves identically to a 0/-1
type.
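
To illustrate the expansion: the select is rewritten as a bitwise blend,
which needs an all-ones constant to invert the mask. A minimal sketch in
IR (illustrative only, not code from the patch):

  define <4 x i1> @blend(<4 x i1> %cc, <4 x i1> %a, <4 x i1> %b) {
    %t  = and <4 x i1> %cc, %a
    %nc = xor <4 x i1> %cc, <i1 true, i1 true, i1 true, i1 true>
    %f  = and <4 x i1> %nc, %b
    %v  = or <4 x i1> %t, %f
    ret <4 x i1> %v
  }

For i1 elements the constant true is itself the all-ones value of the
type, which is why a 0/1 boolean scheme is safe in this case.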

This greatly helps RISC-V, which relies on expansion for these nodes. It
also allows scalable-vector bool VSELECTs to use the default expansion,
where before it would crash in SelectionDAG::UnrollVectorOp.
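
For example, a scalable-vector mask select such as the following (a
hypothetical case in the spirit of the added vselect-mask.ll test, not
copied from it) previously crashed during legalization and now lowers
through the same mask-register expansion:

  define <vscale x 2 x i1> @vselect_nxv2i1(<vscale x 2 x i1> %a, <vscale x 2 x i1> %b, <vscale x 2 x i1> %cc) {
    %v = select <vscale x 2 x i1> %cc, <vscale x 2 x i1> %a, <vscale x 2 x i1> %b
    ret <vscale x 2 x i1> %v
  }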

Reviewed By: craig.topper

Differential Revision: https://reviews.llvm.org/D103147

Added: 
    llvm/test/CodeGen/RISCV/rvv/vselect-mask.ll

Modified: 
    llvm/lib/CodeGen/SelectionDAG/LegalizeVectorOps.cpp
    llvm/lib/Target/RISCV/RISCVISelLowering.cpp
    llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vselect.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/CodeGen/SelectionDAG/LegalizeVectorOps.cpp b/llvm/lib/CodeGen/SelectionDAG/LegalizeVectorOps.cpp
index 581708112e896..0aa2660c7c71b 100644
--- a/llvm/lib/CodeGen/SelectionDAG/LegalizeVectorOps.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/LegalizeVectorOps.cpp
@@ -1179,14 +1179,19 @@ SDValue VectorLegalizer::ExpandVSELECT(SDNode *Node) {
   // AND,OR,XOR, we will have to scalarize the op.
   // Notice that the operation may be 'promoted' which means that it is
   // 'bitcasted' to another type which is handled.
-  // This operation also isn't safe with AND, OR, XOR when the boolean
-  // type is 0/1 as we need an all ones vector constant to mask with.
-  // FIXME: Sign extend 1 to all ones if thats legal on the target.
   if (TLI.getOperationAction(ISD::AND, VT) == TargetLowering::Expand ||
       TLI.getOperationAction(ISD::XOR, VT) == TargetLowering::Expand ||
-      TLI.getOperationAction(ISD::OR, VT) == TargetLowering::Expand ||
-      TLI.getBooleanContents(Op1.getValueType()) !=
-          TargetLowering::ZeroOrNegativeOneBooleanContent)
+      TLI.getOperationAction(ISD::OR, VT) == TargetLowering::Expand)
+    return DAG.UnrollVectorOp(Node);
+
+  // This operation also isn't safe with AND, OR, XOR when the boolean type is
+  // 0/1 and the select operands aren't also booleans, as we need an all-ones
+  // vector constant to mask with.
+  // FIXME: Sign extend 1 to all ones if that's legal on the target.
+  auto BoolContents = TLI.getBooleanContents(Op1.getValueType());
+  if (BoolContents != TargetLowering::ZeroOrNegativeOneBooleanContent &&
+      !(BoolContents == TargetLowering::ZeroOrOneBooleanContent &&
+        Op1.getValueType().getVectorElementType() == MVT::i1))
     return DAG.UnrollVectorOp(Node);
 
  // If the mask and the type are different sizes, unroll the vector op. This

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index a840a5e811f41..0775033c1f81b 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -445,6 +445,7 @@ RISCVTargetLowering::RISCVTargetLowering(const TargetMachine &TM,
 
       setOperationAction(ISD::SELECT, VT, Expand);
       setOperationAction(ISD::SELECT_CC, VT, Expand);
+      setOperationAction(ISD::VSELECT, VT, Expand);
 
       setOperationAction(ISD::VECREDUCE_AND, VT, Custom);
       setOperationAction(ISD::VECREDUCE_OR, VT, Custom);

diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vselect.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vselect.ll
index 193ebdb00b939..22e242aac68c3 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vselect.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vselect.ll
@@ -1,6 +1,6 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc -mtriple=riscv32 -target-abi=ilp32d -mattr=+experimental-v,+experimental-zfh,+f,+d -riscv-v-vector-bits-min=128 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV32
-; RUN: llc -mtriple=riscv64 -target-abi=lp64d -mattr=+experimental-v,+experimental-zfh,+f,+d -riscv-v-vector-bits-min=128 -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV64
+; RUN: llc -mtriple=riscv32 -target-abi=ilp32d -mattr=+experimental-v,+experimental-zfh,+f,+d -riscv-v-vector-bits-min=128 -verify-machineinstrs < %s | FileCheck %s
+; RUN: llc -mtriple=riscv64 -target-abi=lp64d -mattr=+experimental-v,+experimental-zfh,+f,+d -riscv-v-vector-bits-min=128 -verify-machineinstrs < %s | FileCheck %s
 
 define void @vselect_vv_v8i32(<8 x i32>* %a, <8 x i32>* %b, <8 x i1>* %cc, <8 x i32>* %z) {
 ; CHECK-LABEL: vselect_vv_v8i32:
@@ -248,41 +248,10 @@ define void @vselect_vfpzero_v32f16(<32 x half>* %b, <32 x i1>* %cc, <32 x half>
 define <2 x i1> @vselect_v2i1(<2 x i1> %a, <2 x i1> %b, <2 x i1> %cc) {
 ; CHECK-LABEL: vselect_v2i1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vmv1r.v v28, v0
 ; CHECK-NEXT:    vsetivli zero, 2, e8,mf8,ta,mu
-; CHECK-NEXT:    vmv.v.i v26, 0
-; CHECK-NEXT:    vmv1r.v v0, v8
-; CHECK-NEXT:    vmerge.vim v25, v26, 1, v0
-; CHECK-NEXT:    vmv1r.v v0, v9
-; CHECK-NEXT:    vmerge.vim v27, v26, 1, v0
-; CHECK-NEXT:    vmv.x.s a0, v27
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    vmv1r.v v0, v28
-; CHECK-NEXT:    vmerge.vim v26, v26, 1, v0
-; CHECK-NEXT:    bnez a0, .LBB12_2
-; CHECK-NEXT:  # %bb.1:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    j .LBB12_3
-; CHECK-NEXT:  .LBB12_2:
-; CHECK-NEXT:    vmv.x.s a0, v26
-; CHECK-NEXT:  .LBB12_3:
-; CHECK-NEXT:    vsetivli zero, 1, e8,mf8,ta,mu
-; CHECK-NEXT:    vslidedown.vi v27, v27, 1
-; CHECK-NEXT:    vmv.x.s a1, v27
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    bnez a1, .LBB12_5
-; CHECK-NEXT:  # %bb.4:
-; CHECK-NEXT:    vslidedown.vi v25, v25, 1
-; CHECK-NEXT:    j .LBB12_6
-; CHECK-NEXT:  .LBB12_5:
-; CHECK-NEXT:    vslidedown.vi v25, v26, 1
-; CHECK-NEXT:  .LBB12_6:
-; CHECK-NEXT:    vmv.x.s a1, v25
-; CHECK-NEXT:    vsetivli zero, 2, e8,mf8,ta,mu
-; CHECK-NEXT:    vmv.v.x v25, a1
-; CHECK-NEXT:    vmv.s.x v25, a0
-; CHECK-NEXT:    vand.vi v25, v25, 1
-; CHECK-NEXT:    vmsne.vi v0, v25, 0
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
 ; CHECK-NEXT:    ret
   %v = select <2 x i1> %cc, <2 x i1> %a, <2 x i1> %b
   ret <2 x i1> %v
@@ -291,70 +260,10 @@ define <2 x i1> @vselect_v2i1(<2 x i1> %a, <2 x i1> %b, <2 x i1> %cc) {
 define <4 x i1> @vselect_v4i1(<4 x i1> %a, <4 x i1> %b, <4 x i1> %cc) {
 ; CHECK-LABEL: vselect_v4i1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    addi sp, sp, -16
-; CHECK-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-NEXT:    vmv1r.v v28, v0
-; CHECK-NEXT:    vsetivli zero, 4, e8,mf4,ta,mu
-; CHECK-NEXT:    vmv.v.i v26, 0
-; CHECK-NEXT:    vmv1r.v v0, v8
-; CHECK-NEXT:    vmerge.vim v25, v26, 1, v0
-; CHECK-NEXT:    vmv1r.v v0, v9
-; CHECK-NEXT:    vmerge.vim v27, v26, 1, v0
-; CHECK-NEXT:    vmv.x.s a0, v27
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    vmv1r.v v0, v28
-; CHECK-NEXT:    vmerge.vim v26, v26, 1, v0
-; CHECK-NEXT:    bnez a0, .LBB13_2
-; CHECK-NEXT:  # %bb.1:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    j .LBB13_3
-; CHECK-NEXT:  .LBB13_2:
-; CHECK-NEXT:    vmv.x.s a0, v26
-; CHECK-NEXT:  .LBB13_3:
-; CHECK-NEXT:    sb a0, 12(sp)
-; CHECK-NEXT:    vsetivli zero, 1, e8,mf4,ta,mu
-; CHECK-NEXT:    vslidedown.vi v28, v27, 3
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    bnez a0, .LBB13_5
-; CHECK-NEXT:  # %bb.4:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 3
-; CHECK-NEXT:    j .LBB13_6
-; CHECK-NEXT:  .LBB13_5:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 3
-; CHECK-NEXT:  .LBB13_6:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 2
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 15(sp)
-; CHECK-NEXT:    bnez a1, .LBB13_8
-; CHECK-NEXT:  # %bb.7:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 2
-; CHECK-NEXT:    j .LBB13_9
-; CHECK-NEXT:  .LBB13_8:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 2
-; CHECK-NEXT:  .LBB13_9:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v27, v27, 1
-; CHECK-NEXT:    vmv.x.s a1, v27
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 14(sp)
-; CHECK-NEXT:    bnez a1, .LBB13_11
-; CHECK-NEXT:  # %bb.10:
-; CHECK-NEXT:    vslidedown.vi v25, v25, 1
-; CHECK-NEXT:    j .LBB13_12
-; CHECK-NEXT:  .LBB13_11:
-; CHECK-NEXT:    vslidedown.vi v25, v26, 1
-; CHECK-NEXT:  .LBB13_12:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    sb a0, 13(sp)
 ; CHECK-NEXT:    vsetivli zero, 4, e8,mf4,ta,mu
-; CHECK-NEXT:    addi a0, sp, 12
-; CHECK-NEXT:    vle8.v v25, (a0)
-; CHECK-NEXT:    vand.vi v25, v25, 1
-; CHECK-NEXT:    vmsne.vi v0, v25, 0
-; CHECK-NEXT:    addi sp, sp, 16
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
 ; CHECK-NEXT:    ret
   %v = select <4 x i1> %cc, <4 x i1> %a, <4 x i1> %b
   ret <4 x i1> %v
@@ -363,118 +272,10 @@ define <4 x i1> @vselect_v4i1(<4 x i1> %a, <4 x i1> %b, <4 x i1> %cc) {
 define <8 x i1> @vselect_v8i1(<8 x i1> %a, <8 x i1> %b, <8 x i1> %cc) {
 ; CHECK-LABEL: vselect_v8i1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    addi sp, sp, -16
-; CHECK-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-NEXT:    vmv1r.v v28, v0
 ; CHECK-NEXT:    vsetivli zero, 8, e8,mf2,ta,mu
-; CHECK-NEXT:    vmv.v.i v26, 0
-; CHECK-NEXT:    vmv1r.v v0, v8
-; CHECK-NEXT:    vmerge.vim v25, v26, 1, v0
-; CHECK-NEXT:    vmv1r.v v0, v9
-; CHECK-NEXT:    vmerge.vim v27, v26, 1, v0
-; CHECK-NEXT:    vmv.x.s a0, v27
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    vmv1r.v v0, v28
-; CHECK-NEXT:    vmerge.vim v26, v26, 1, v0
-; CHECK-NEXT:    bnez a0, .LBB14_2
-; CHECK-NEXT:  # %bb.1:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    j .LBB14_3
-; CHECK-NEXT:  .LBB14_2:
-; CHECK-NEXT:    vmv.x.s a0, v26
-; CHECK-NEXT:  .LBB14_3:
-; CHECK-NEXT:    sb a0, 8(sp)
-; CHECK-NEXT:    vsetivli zero, 1, e8,mf2,ta,mu
-; CHECK-NEXT:    vslidedown.vi v28, v27, 7
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    bnez a0, .LBB14_5
-; CHECK-NEXT:  # %bb.4:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 7
-; CHECK-NEXT:    j .LBB14_6
-; CHECK-NEXT:  .LBB14_5:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 7
-; CHECK-NEXT:  .LBB14_6:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 6
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 15(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_8
-; CHECK-NEXT:  # %bb.7:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 6
-; CHECK-NEXT:    j .LBB14_9
-; CHECK-NEXT:  .LBB14_8:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 6
-; CHECK-NEXT:  .LBB14_9:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 5
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 14(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_11
-; CHECK-NEXT:  # %bb.10:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 5
-; CHECK-NEXT:    j .LBB14_12
-; CHECK-NEXT:  .LBB14_11:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 5
-; CHECK-NEXT:  .LBB14_12:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 4
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 13(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_14
-; CHECK-NEXT:  # %bb.13:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 4
-; CHECK-NEXT:    j .LBB14_15
-; CHECK-NEXT:  .LBB14_14:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 4
-; CHECK-NEXT:  .LBB14_15:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 3
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 12(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_17
-; CHECK-NEXT:  # %bb.16:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 3
-; CHECK-NEXT:    j .LBB14_18
-; CHECK-NEXT:  .LBB14_17:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 3
-; CHECK-NEXT:  .LBB14_18:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 2
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 11(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_20
-; CHECK-NEXT:  # %bb.19:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 2
-; CHECK-NEXT:    j .LBB14_21
-; CHECK-NEXT:  .LBB14_20:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 2
-; CHECK-NEXT:  .LBB14_21:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v27, v27, 1
-; CHECK-NEXT:    vmv.x.s a1, v27
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 10(sp)
-; CHECK-NEXT:    bnez a1, .LBB14_23
-; CHECK-NEXT:  # %bb.22:
-; CHECK-NEXT:    vslidedown.vi v25, v25, 1
-; CHECK-NEXT:    j .LBB14_24
-; CHECK-NEXT:  .LBB14_23:
-; CHECK-NEXT:    vslidedown.vi v25, v26, 1
-; CHECK-NEXT:  .LBB14_24:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    sb a0, 9(sp)
-; CHECK-NEXT:    vsetivli zero, 8, e8,mf2,ta,mu
-; CHECK-NEXT:    addi a0, sp, 8
-; CHECK-NEXT:    vle8.v v25, (a0)
-; CHECK-NEXT:    vand.vi v25, v25, 1
-; CHECK-NEXT:    vmsne.vi v0, v25, 0
-; CHECK-NEXT:    addi sp, sp, 16
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
 ; CHECK-NEXT:    ret
   %v = select <8 x i1> %cc, <8 x i1> %a, <8 x i1> %b
   ret <8 x i1> %v
@@ -483,2710 +284,37 @@ define <8 x i1> @vselect_v8i1(<8 x i1> %a, <8 x i1> %b, <8 x i1> %cc) {
 define <16 x i1> @vselect_v16i1(<16 x i1> %a, <16 x i1> %b, <16 x i1> %cc) {
 ; CHECK-LABEL: vselect_v16i1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    addi sp, sp, -16
-; CHECK-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-NEXT:    vmv1r.v v28, v0
-; CHECK-NEXT:    vsetivli zero, 16, e8,m1,ta,mu
-; CHECK-NEXT:    vmv.v.i v26, 0
-; CHECK-NEXT:    vmv1r.v v0, v8
-; CHECK-NEXT:    vmerge.vim v25, v26, 1, v0
-; CHECK-NEXT:    vmv1r.v v0, v9
-; CHECK-NEXT:    vmerge.vim v27, v26, 1, v0
-; CHECK-NEXT:    vmv.x.s a0, v27
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    vmv1r.v v0, v28
-; CHECK-NEXT:    vmerge.vim v26, v26, 1, v0
-; CHECK-NEXT:    bnez a0, .LBB15_2
-; CHECK-NEXT:  # %bb.1:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    j .LBB15_3
-; CHECK-NEXT:  .LBB15_2:
-; CHECK-NEXT:    vmv.x.s a0, v26
-; CHECK-NEXT:  .LBB15_3:
-; CHECK-NEXT:    sb a0, 0(sp)
-; CHECK-NEXT:    vsetivli zero, 1, e8,m1,ta,mu
-; CHECK-NEXT:    vslidedown.vi v28, v27, 15
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    andi a0, a0, 1
-; CHECK-NEXT:    bnez a0, .LBB15_5
-; CHECK-NEXT:  # %bb.4:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 15
-; CHECK-NEXT:    j .LBB15_6
-; CHECK-NEXT:  .LBB15_5:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 15
-; CHECK-NEXT:  .LBB15_6:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 14
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 15(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_8
-; CHECK-NEXT:  # %bb.7:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 14
-; CHECK-NEXT:    j .LBB15_9
-; CHECK-NEXT:  .LBB15_8:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 14
-; CHECK-NEXT:  .LBB15_9:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 13
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 14(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_11
-; CHECK-NEXT:  # %bb.10:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 13
-; CHECK-NEXT:    j .LBB15_12
-; CHECK-NEXT:  .LBB15_11:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 13
-; CHECK-NEXT:  .LBB15_12:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 12
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 13(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_14
-; CHECK-NEXT:  # %bb.13:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 12
-; CHECK-NEXT:    j .LBB15_15
-; CHECK-NEXT:  .LBB15_14:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 12
-; CHECK-NEXT:  .LBB15_15:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 11
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 12(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_17
-; CHECK-NEXT:  # %bb.16:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 11
-; CHECK-NEXT:    j .LBB15_18
-; CHECK-NEXT:  .LBB15_17:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 11
-; CHECK-NEXT:  .LBB15_18:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 10
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 11(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_20
-; CHECK-NEXT:  # %bb.19:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 10
-; CHECK-NEXT:    j .LBB15_21
-; CHECK-NEXT:  .LBB15_20:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 10
-; CHECK-NEXT:  .LBB15_21:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 9
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 10(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_23
-; CHECK-NEXT:  # %bb.22:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 9
-; CHECK-NEXT:    j .LBB15_24
-; CHECK-NEXT:  .LBB15_23:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 9
-; CHECK-NEXT:  .LBB15_24:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 8
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 9(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_26
-; CHECK-NEXT:  # %bb.25:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 8
-; CHECK-NEXT:    j .LBB15_27
-; CHECK-NEXT:  .LBB15_26:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 8
-; CHECK-NEXT:  .LBB15_27:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 7
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 8(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_29
-; CHECK-NEXT:  # %bb.28:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 7
-; CHECK-NEXT:    j .LBB15_30
-; CHECK-NEXT:  .LBB15_29:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 7
-; CHECK-NEXT:  .LBB15_30:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 6
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 7(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_32
-; CHECK-NEXT:  # %bb.31:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 6
-; CHECK-NEXT:    j .LBB15_33
-; CHECK-NEXT:  .LBB15_32:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 6
-; CHECK-NEXT:  .LBB15_33:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 5
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 6(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_35
-; CHECK-NEXT:  # %bb.34:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 5
-; CHECK-NEXT:    j .LBB15_36
-; CHECK-NEXT:  .LBB15_35:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 5
-; CHECK-NEXT:  .LBB15_36:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 4
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 5(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_38
-; CHECK-NEXT:  # %bb.37:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 4
-; CHECK-NEXT:    j .LBB15_39
-; CHECK-NEXT:  .LBB15_38:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 4
-; CHECK-NEXT:  .LBB15_39:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 3
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 4(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_41
-; CHECK-NEXT:  # %bb.40:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 3
-; CHECK-NEXT:    j .LBB15_42
-; CHECK-NEXT:  .LBB15_41:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 3
-; CHECK-NEXT:  .LBB15_42:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v28, v27, 2
-; CHECK-NEXT:    vmv.x.s a1, v28
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 3(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_44
-; CHECK-NEXT:  # %bb.43:
-; CHECK-NEXT:    vslidedown.vi v28, v25, 2
-; CHECK-NEXT:    j .LBB15_45
-; CHECK-NEXT:  .LBB15_44:
-; CHECK-NEXT:    vslidedown.vi v28, v26, 2
-; CHECK-NEXT:  .LBB15_45:
-; CHECK-NEXT:    vmv.x.s a0, v28
-; CHECK-NEXT:    vslidedown.vi v27, v27, 1
-; CHECK-NEXT:    vmv.x.s a1, v27
-; CHECK-NEXT:    andi a1, a1, 1
-; CHECK-NEXT:    sb a0, 2(sp)
-; CHECK-NEXT:    bnez a1, .LBB15_47
-; CHECK-NEXT:  # %bb.46:
-; CHECK-NEXT:    vslidedown.vi v25, v25, 1
-; CHECK-NEXT:    j .LBB15_48
-; CHECK-NEXT:  .LBB15_47:
-; CHECK-NEXT:    vslidedown.vi v25, v26, 1
-; CHECK-NEXT:  .LBB15_48:
-; CHECK-NEXT:    vmv.x.s a0, v25
-; CHECK-NEXT:    sb a0, 1(sp)
 ; CHECK-NEXT:    vsetivli zero, 16, e8,m1,ta,mu
-; CHECK-NEXT:    vle8.v v25, (sp)
-; CHECK-NEXT:    vand.vi v25, v25, 1
-; CHECK-NEXT:    vmsne.vi v0, v25, 0
-; CHECK-NEXT:    addi sp, sp, 16
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
 ; CHECK-NEXT:    ret
   %v = select <16 x i1> %cc, <16 x i1> %a, <16 x i1> %b
   ret <16 x i1> %v
 }
 
 define <32 x i1> @vselect_v32i1(<32 x i1> %a, <32 x i1> %b, <32 x i1> %cc) {
-; RV32-LABEL: vselect_v32i1:
-; RV32:       # %bb.0:
-; RV32-NEXT:    addi sp, sp, -64
-; RV32-NEXT:    .cfi_def_cfa_offset 64
-; RV32-NEXT:    sw ra, 60(sp) # 4-byte Folded Spill
-; RV32-NEXT:    sw s0, 56(sp) # 4-byte Folded Spill
-; RV32-NEXT:    .cfi_offset ra, -4
-; RV32-NEXT:    .cfi_offset s0, -8
-; RV32-NEXT:    addi s0, sp, 64
-; RV32-NEXT:    .cfi_def_cfa s0, 0
-; RV32-NEXT:    andi sp, sp, -32
-; RV32-NEXT:    vmv1r.v v25, v0
-; RV32-NEXT:    addi a0, zero, 32
-; RV32-NEXT:    vsetvli zero, a0, e8,m2,ta,mu
-; RV32-NEXT:    vmv.v.i v28, 0
-; RV32-NEXT:    vmv1r.v v0, v8
-; RV32-NEXT:    vmerge.vim v26, v28, 1, v0
-; RV32-NEXT:    vmv1r.v v0, v9
-; RV32-NEXT:    vmerge.vim v30, v28, 1, v0
-; RV32-NEXT:    vmv.x.s a1, v30
-; RV32-NEXT:    andi a1, a1, 1
-; RV32-NEXT:    vmv1r.v v0, v25
-; RV32-NEXT:    vmerge.vim v28, v28, 1, v0
-; RV32-NEXT:    bnez a1, .LBB16_2
-; RV32-NEXT:  # %bb.1:
-; RV32-NEXT:    vmv.x.s a1, v26
-; RV32-NEXT:    j .LBB16_3
-; RV32-NEXT:  .LBB16_2:
-; RV32-NEXT:    vmv.x.s a1, v28
-; RV32-NEXT:  .LBB16_3:
-; RV32-NEXT:    sb a1, 0(sp)
-; RV32-NEXT:    vsetivli zero, 1, e8,m2,ta,mu
-; RV32-NEXT:    vslidedown.vi v8, v30, 31
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    andi a1, a1, 1
-; RV32-NEXT:    bnez a1, .LBB16_5
-; RV32-NEXT:  # %bb.4:
-; RV32-NEXT:    vslidedown.vi v8, v26, 31
-; RV32-NEXT:    j .LBB16_6
-; RV32-NEXT:  .LBB16_5:
-; RV32-NEXT:    vslidedown.vi v8, v28, 31
-; RV32-NEXT:  .LBB16_6:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 30
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 31(sp)
-; RV32-NEXT:    bnez a2, .LBB16_8
-; RV32-NEXT:  # %bb.7:
-; RV32-NEXT:    vslidedown.vi v8, v26, 30
-; RV32-NEXT:    j .LBB16_9
-; RV32-NEXT:  .LBB16_8:
-; RV32-NEXT:    vslidedown.vi v8, v28, 30
-; RV32-NEXT:  .LBB16_9:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 29
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 30(sp)
-; RV32-NEXT:    bnez a2, .LBB16_11
-; RV32-NEXT:  # %bb.10:
-; RV32-NEXT:    vslidedown.vi v8, v26, 29
-; RV32-NEXT:    j .LBB16_12
-; RV32-NEXT:  .LBB16_11:
-; RV32-NEXT:    vslidedown.vi v8, v28, 29
-; RV32-NEXT:  .LBB16_12:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 28
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 29(sp)
-; RV32-NEXT:    bnez a2, .LBB16_14
-; RV32-NEXT:  # %bb.13:
-; RV32-NEXT:    vslidedown.vi v8, v26, 28
-; RV32-NEXT:    j .LBB16_15
-; RV32-NEXT:  .LBB16_14:
-; RV32-NEXT:    vslidedown.vi v8, v28, 28
-; RV32-NEXT:  .LBB16_15:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 27
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 28(sp)
-; RV32-NEXT:    bnez a2, .LBB16_17
-; RV32-NEXT:  # %bb.16:
-; RV32-NEXT:    vslidedown.vi v8, v26, 27
-; RV32-NEXT:    j .LBB16_18
-; RV32-NEXT:  .LBB16_17:
-; RV32-NEXT:    vslidedown.vi v8, v28, 27
-; RV32-NEXT:  .LBB16_18:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 26
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 27(sp)
-; RV32-NEXT:    bnez a2, .LBB16_20
-; RV32-NEXT:  # %bb.19:
-; RV32-NEXT:    vslidedown.vi v8, v26, 26
-; RV32-NEXT:    j .LBB16_21
-; RV32-NEXT:  .LBB16_20:
-; RV32-NEXT:    vslidedown.vi v8, v28, 26
-; RV32-NEXT:  .LBB16_21:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 25
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 26(sp)
-; RV32-NEXT:    bnez a2, .LBB16_23
-; RV32-NEXT:  # %bb.22:
-; RV32-NEXT:    vslidedown.vi v8, v26, 25
-; RV32-NEXT:    j .LBB16_24
-; RV32-NEXT:  .LBB16_23:
-; RV32-NEXT:    vslidedown.vi v8, v28, 25
-; RV32-NEXT:  .LBB16_24:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 24
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 25(sp)
-; RV32-NEXT:    bnez a2, .LBB16_26
-; RV32-NEXT:  # %bb.25:
-; RV32-NEXT:    vslidedown.vi v8, v26, 24
-; RV32-NEXT:    j .LBB16_27
-; RV32-NEXT:  .LBB16_26:
-; RV32-NEXT:    vslidedown.vi v8, v28, 24
-; RV32-NEXT:  .LBB16_27:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 23
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 24(sp)
-; RV32-NEXT:    bnez a2, .LBB16_29
-; RV32-NEXT:  # %bb.28:
-; RV32-NEXT:    vslidedown.vi v8, v26, 23
-; RV32-NEXT:    j .LBB16_30
-; RV32-NEXT:  .LBB16_29:
-; RV32-NEXT:    vslidedown.vi v8, v28, 23
-; RV32-NEXT:  .LBB16_30:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 22
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 23(sp)
-; RV32-NEXT:    bnez a2, .LBB16_32
-; RV32-NEXT:  # %bb.31:
-; RV32-NEXT:    vslidedown.vi v8, v26, 22
-; RV32-NEXT:    j .LBB16_33
-; RV32-NEXT:  .LBB16_32:
-; RV32-NEXT:    vslidedown.vi v8, v28, 22
-; RV32-NEXT:  .LBB16_33:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 21
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 22(sp)
-; RV32-NEXT:    bnez a2, .LBB16_35
-; RV32-NEXT:  # %bb.34:
-; RV32-NEXT:    vslidedown.vi v8, v26, 21
-; RV32-NEXT:    j .LBB16_36
-; RV32-NEXT:  .LBB16_35:
-; RV32-NEXT:    vslidedown.vi v8, v28, 21
-; RV32-NEXT:  .LBB16_36:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 20
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 21(sp)
-; RV32-NEXT:    bnez a2, .LBB16_38
-; RV32-NEXT:  # %bb.37:
-; RV32-NEXT:    vslidedown.vi v8, v26, 20
-; RV32-NEXT:    j .LBB16_39
-; RV32-NEXT:  .LBB16_38:
-; RV32-NEXT:    vslidedown.vi v8, v28, 20
-; RV32-NEXT:  .LBB16_39:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 19
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 20(sp)
-; RV32-NEXT:    bnez a2, .LBB16_41
-; RV32-NEXT:  # %bb.40:
-; RV32-NEXT:    vslidedown.vi v8, v26, 19
-; RV32-NEXT:    j .LBB16_42
-; RV32-NEXT:  .LBB16_41:
-; RV32-NEXT:    vslidedown.vi v8, v28, 19
-; RV32-NEXT:  .LBB16_42:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 18
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 19(sp)
-; RV32-NEXT:    bnez a2, .LBB16_44
-; RV32-NEXT:  # %bb.43:
-; RV32-NEXT:    vslidedown.vi v8, v26, 18
-; RV32-NEXT:    j .LBB16_45
-; RV32-NEXT:  .LBB16_44:
-; RV32-NEXT:    vslidedown.vi v8, v28, 18
-; RV32-NEXT:  .LBB16_45:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 17
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 18(sp)
-; RV32-NEXT:    bnez a2, .LBB16_47
-; RV32-NEXT:  # %bb.46:
-; RV32-NEXT:    vslidedown.vi v8, v26, 17
-; RV32-NEXT:    j .LBB16_48
-; RV32-NEXT:  .LBB16_47:
-; RV32-NEXT:    vslidedown.vi v8, v28, 17
-; RV32-NEXT:  .LBB16_48:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 16
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 17(sp)
-; RV32-NEXT:    bnez a2, .LBB16_50
-; RV32-NEXT:  # %bb.49:
-; RV32-NEXT:    vslidedown.vi v8, v26, 16
-; RV32-NEXT:    j .LBB16_51
-; RV32-NEXT:  .LBB16_50:
-; RV32-NEXT:    vslidedown.vi v8, v28, 16
-; RV32-NEXT:  .LBB16_51:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 15
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 16(sp)
-; RV32-NEXT:    bnez a2, .LBB16_53
-; RV32-NEXT:  # %bb.52:
-; RV32-NEXT:    vslidedown.vi v8, v26, 15
-; RV32-NEXT:    j .LBB16_54
-; RV32-NEXT:  .LBB16_53:
-; RV32-NEXT:    vslidedown.vi v8, v28, 15
-; RV32-NEXT:  .LBB16_54:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 14
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 15(sp)
-; RV32-NEXT:    bnez a2, .LBB16_56
-; RV32-NEXT:  # %bb.55:
-; RV32-NEXT:    vslidedown.vi v8, v26, 14
-; RV32-NEXT:    j .LBB16_57
-; RV32-NEXT:  .LBB16_56:
-; RV32-NEXT:    vslidedown.vi v8, v28, 14
-; RV32-NEXT:  .LBB16_57:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 13
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 14(sp)
-; RV32-NEXT:    bnez a2, .LBB16_59
-; RV32-NEXT:  # %bb.58:
-; RV32-NEXT:    vslidedown.vi v8, v26, 13
-; RV32-NEXT:    j .LBB16_60
-; RV32-NEXT:  .LBB16_59:
-; RV32-NEXT:    vslidedown.vi v8, v28, 13
-; RV32-NEXT:  .LBB16_60:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 12
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 13(sp)
-; RV32-NEXT:    bnez a2, .LBB16_62
-; RV32-NEXT:  # %bb.61:
-; RV32-NEXT:    vslidedown.vi v8, v26, 12
-; RV32-NEXT:    j .LBB16_63
-; RV32-NEXT:  .LBB16_62:
-; RV32-NEXT:    vslidedown.vi v8, v28, 12
-; RV32-NEXT:  .LBB16_63:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 11
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 12(sp)
-; RV32-NEXT:    bnez a2, .LBB16_65
-; RV32-NEXT:  # %bb.64:
-; RV32-NEXT:    vslidedown.vi v8, v26, 11
-; RV32-NEXT:    j .LBB16_66
-; RV32-NEXT:  .LBB16_65:
-; RV32-NEXT:    vslidedown.vi v8, v28, 11
-; RV32-NEXT:  .LBB16_66:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 10
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 11(sp)
-; RV32-NEXT:    bnez a2, .LBB16_68
-; RV32-NEXT:  # %bb.67:
-; RV32-NEXT:    vslidedown.vi v8, v26, 10
-; RV32-NEXT:    j .LBB16_69
-; RV32-NEXT:  .LBB16_68:
-; RV32-NEXT:    vslidedown.vi v8, v28, 10
-; RV32-NEXT:  .LBB16_69:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 9
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 10(sp)
-; RV32-NEXT:    bnez a2, .LBB16_71
-; RV32-NEXT:  # %bb.70:
-; RV32-NEXT:    vslidedown.vi v8, v26, 9
-; RV32-NEXT:    j .LBB16_72
-; RV32-NEXT:  .LBB16_71:
-; RV32-NEXT:    vslidedown.vi v8, v28, 9
-; RV32-NEXT:  .LBB16_72:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 8
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 9(sp)
-; RV32-NEXT:    bnez a2, .LBB16_74
-; RV32-NEXT:  # %bb.73:
-; RV32-NEXT:    vslidedown.vi v8, v26, 8
-; RV32-NEXT:    j .LBB16_75
-; RV32-NEXT:  .LBB16_74:
-; RV32-NEXT:    vslidedown.vi v8, v28, 8
-; RV32-NEXT:  .LBB16_75:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 7
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 8(sp)
-; RV32-NEXT:    bnez a2, .LBB16_77
-; RV32-NEXT:  # %bb.76:
-; RV32-NEXT:    vslidedown.vi v8, v26, 7
-; RV32-NEXT:    j .LBB16_78
-; RV32-NEXT:  .LBB16_77:
-; RV32-NEXT:    vslidedown.vi v8, v28, 7
-; RV32-NEXT:  .LBB16_78:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 6
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 7(sp)
-; RV32-NEXT:    bnez a2, .LBB16_80
-; RV32-NEXT:  # %bb.79:
-; RV32-NEXT:    vslidedown.vi v8, v26, 6
-; RV32-NEXT:    j .LBB16_81
-; RV32-NEXT:  .LBB16_80:
-; RV32-NEXT:    vslidedown.vi v8, v28, 6
-; RV32-NEXT:  .LBB16_81:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 5
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 6(sp)
-; RV32-NEXT:    bnez a2, .LBB16_83
-; RV32-NEXT:  # %bb.82:
-; RV32-NEXT:    vslidedown.vi v8, v26, 5
-; RV32-NEXT:    j .LBB16_84
-; RV32-NEXT:  .LBB16_83:
-; RV32-NEXT:    vslidedown.vi v8, v28, 5
-; RV32-NEXT:  .LBB16_84:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 4
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 5(sp)
-; RV32-NEXT:    bnez a2, .LBB16_86
-; RV32-NEXT:  # %bb.85:
-; RV32-NEXT:    vslidedown.vi v8, v26, 4
-; RV32-NEXT:    j .LBB16_87
-; RV32-NEXT:  .LBB16_86:
-; RV32-NEXT:    vslidedown.vi v8, v28, 4
-; RV32-NEXT:  .LBB16_87:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 3
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 4(sp)
-; RV32-NEXT:    bnez a2, .LBB16_89
-; RV32-NEXT:  # %bb.88:
-; RV32-NEXT:    vslidedown.vi v8, v26, 3
-; RV32-NEXT:    j .LBB16_90
-; RV32-NEXT:  .LBB16_89:
-; RV32-NEXT:    vslidedown.vi v8, v28, 3
-; RV32-NEXT:  .LBB16_90:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v8, v30, 2
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 3(sp)
-; RV32-NEXT:    bnez a2, .LBB16_92
-; RV32-NEXT:  # %bb.91:
-; RV32-NEXT:    vslidedown.vi v8, v26, 2
-; RV32-NEXT:    j .LBB16_93
-; RV32-NEXT:  .LBB16_92:
-; RV32-NEXT:    vslidedown.vi v8, v28, 2
-; RV32-NEXT:  .LBB16_93:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:    vslidedown.vi v30, v30, 1
-; RV32-NEXT:    vmv.x.s a2, v30
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 2(sp)
-; RV32-NEXT:    bnez a2, .LBB16_95
-; RV32-NEXT:  # %bb.94:
-; RV32-NEXT:    vslidedown.vi v26, v26, 1
-; RV32-NEXT:    j .LBB16_96
-; RV32-NEXT:  .LBB16_95:
-; RV32-NEXT:    vslidedown.vi v26, v28, 1
-; RV32-NEXT:  .LBB16_96:
-; RV32-NEXT:    vmv.x.s a1, v26
-; RV32-NEXT:    sb a1, 1(sp)
-; RV32-NEXT:    vsetvli zero, a0, e8,m2,ta,mu
-; RV32-NEXT:    vle8.v v26, (sp)
-; RV32-NEXT:    vand.vi v26, v26, 1
-; RV32-NEXT:    vmsne.vi v0, v26, 0
-; RV32-NEXT:    addi sp, s0, -64
-; RV32-NEXT:    lw s0, 56(sp) # 4-byte Folded Reload
-; RV32-NEXT:    lw ra, 60(sp) # 4-byte Folded Reload
-; RV32-NEXT:    addi sp, sp, 64
-; RV32-NEXT:    ret
-;
-; RV64-LABEL: vselect_v32i1:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -64
-; RV64-NEXT:    .cfi_def_cfa_offset 64
-; RV64-NEXT:    sd ra, 56(sp) # 8-byte Folded Spill
-; RV64-NEXT:    sd s0, 48(sp) # 8-byte Folded Spill
-; RV64-NEXT:    .cfi_offset ra, -8
-; RV64-NEXT:    .cfi_offset s0, -16
-; RV64-NEXT:    addi s0, sp, 64
-; RV64-NEXT:    .cfi_def_cfa s0, 0
-; RV64-NEXT:    andi sp, sp, -32
-; RV64-NEXT:    vmv1r.v v25, v0
-; RV64-NEXT:    addi a0, zero, 32
-; RV64-NEXT:    vsetvli zero, a0, e8,m2,ta,mu
-; RV64-NEXT:    vmv.v.i v28, 0
-; RV64-NEXT:    vmv1r.v v0, v8
-; RV64-NEXT:    vmerge.vim v26, v28, 1, v0
-; RV64-NEXT:    vmv1r.v v0, v9
-; RV64-NEXT:    vmerge.vim v30, v28, 1, v0
-; RV64-NEXT:    vmv.x.s a1, v30
-; RV64-NEXT:    andi a1, a1, 1
-; RV64-NEXT:    vmv1r.v v0, v25
-; RV64-NEXT:    vmerge.vim v28, v28, 1, v0
-; RV64-NEXT:    bnez a1, .LBB16_2
-; RV64-NEXT:  # %bb.1:
-; RV64-NEXT:    vmv.x.s a1, v26
-; RV64-NEXT:    j .LBB16_3
-; RV64-NEXT:  .LBB16_2:
-; RV64-NEXT:    vmv.x.s a1, v28
-; RV64-NEXT:  .LBB16_3:
-; RV64-NEXT:    sb a1, 0(sp)
-; RV64-NEXT:    vsetivli zero, 1, e8,m2,ta,mu
-; RV64-NEXT:    vslidedown.vi v8, v30, 31
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    andi a1, a1, 1
-; RV64-NEXT:    bnez a1, .LBB16_5
-; RV64-NEXT:  # %bb.4:
-; RV64-NEXT:    vslidedown.vi v8, v26, 31
-; RV64-NEXT:    j .LBB16_6
-; RV64-NEXT:  .LBB16_5:
-; RV64-NEXT:    vslidedown.vi v8, v28, 31
-; RV64-NEXT:  .LBB16_6:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 30
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 31(sp)
-; RV64-NEXT:    bnez a2, .LBB16_8
-; RV64-NEXT:  # %bb.7:
-; RV64-NEXT:    vslidedown.vi v8, v26, 30
-; RV64-NEXT:    j .LBB16_9
-; RV64-NEXT:  .LBB16_8:
-; RV64-NEXT:    vslidedown.vi v8, v28, 30
-; RV64-NEXT:  .LBB16_9:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 29
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 30(sp)
-; RV64-NEXT:    bnez a2, .LBB16_11
-; RV64-NEXT:  # %bb.10:
-; RV64-NEXT:    vslidedown.vi v8, v26, 29
-; RV64-NEXT:    j .LBB16_12
-; RV64-NEXT:  .LBB16_11:
-; RV64-NEXT:    vslidedown.vi v8, v28, 29
-; RV64-NEXT:  .LBB16_12:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 28
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 29(sp)
-; RV64-NEXT:    bnez a2, .LBB16_14
-; RV64-NEXT:  # %bb.13:
-; RV64-NEXT:    vslidedown.vi v8, v26, 28
-; RV64-NEXT:    j .LBB16_15
-; RV64-NEXT:  .LBB16_14:
-; RV64-NEXT:    vslidedown.vi v8, v28, 28
-; RV64-NEXT:  .LBB16_15:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 27
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 28(sp)
-; RV64-NEXT:    bnez a2, .LBB16_17
-; RV64-NEXT:  # %bb.16:
-; RV64-NEXT:    vslidedown.vi v8, v26, 27
-; RV64-NEXT:    j .LBB16_18
-; RV64-NEXT:  .LBB16_17:
-; RV64-NEXT:    vslidedown.vi v8, v28, 27
-; RV64-NEXT:  .LBB16_18:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 26
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 27(sp)
-; RV64-NEXT:    bnez a2, .LBB16_20
-; RV64-NEXT:  # %bb.19:
-; RV64-NEXT:    vslidedown.vi v8, v26, 26
-; RV64-NEXT:    j .LBB16_21
-; RV64-NEXT:  .LBB16_20:
-; RV64-NEXT:    vslidedown.vi v8, v28, 26
-; RV64-NEXT:  .LBB16_21:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 25
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 26(sp)
-; RV64-NEXT:    bnez a2, .LBB16_23
-; RV64-NEXT:  # %bb.22:
-; RV64-NEXT:    vslidedown.vi v8, v26, 25
-; RV64-NEXT:    j .LBB16_24
-; RV64-NEXT:  .LBB16_23:
-; RV64-NEXT:    vslidedown.vi v8, v28, 25
-; RV64-NEXT:  .LBB16_24:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 24
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 25(sp)
-; RV64-NEXT:    bnez a2, .LBB16_26
-; RV64-NEXT:  # %bb.25:
-; RV64-NEXT:    vslidedown.vi v8, v26, 24
-; RV64-NEXT:    j .LBB16_27
-; RV64-NEXT:  .LBB16_26:
-; RV64-NEXT:    vslidedown.vi v8, v28, 24
-; RV64-NEXT:  .LBB16_27:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 23
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 24(sp)
-; RV64-NEXT:    bnez a2, .LBB16_29
-; RV64-NEXT:  # %bb.28:
-; RV64-NEXT:    vslidedown.vi v8, v26, 23
-; RV64-NEXT:    j .LBB16_30
-; RV64-NEXT:  .LBB16_29:
-; RV64-NEXT:    vslidedown.vi v8, v28, 23
-; RV64-NEXT:  .LBB16_30:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 22
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 23(sp)
-; RV64-NEXT:    bnez a2, .LBB16_32
-; RV64-NEXT:  # %bb.31:
-; RV64-NEXT:    vslidedown.vi v8, v26, 22
-; RV64-NEXT:    j .LBB16_33
-; RV64-NEXT:  .LBB16_32:
-; RV64-NEXT:    vslidedown.vi v8, v28, 22
-; RV64-NEXT:  .LBB16_33:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 21
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 22(sp)
-; RV64-NEXT:    bnez a2, .LBB16_35
-; RV64-NEXT:  # %bb.34:
-; RV64-NEXT:    vslidedown.vi v8, v26, 21
-; RV64-NEXT:    j .LBB16_36
-; RV64-NEXT:  .LBB16_35:
-; RV64-NEXT:    vslidedown.vi v8, v28, 21
-; RV64-NEXT:  .LBB16_36:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 20
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 21(sp)
-; RV64-NEXT:    bnez a2, .LBB16_38
-; RV64-NEXT:  # %bb.37:
-; RV64-NEXT:    vslidedown.vi v8, v26, 20
-; RV64-NEXT:    j .LBB16_39
-; RV64-NEXT:  .LBB16_38:
-; RV64-NEXT:    vslidedown.vi v8, v28, 20
-; RV64-NEXT:  .LBB16_39:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 19
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 20(sp)
-; RV64-NEXT:    bnez a2, .LBB16_41
-; RV64-NEXT:  # %bb.40:
-; RV64-NEXT:    vslidedown.vi v8, v26, 19
-; RV64-NEXT:    j .LBB16_42
-; RV64-NEXT:  .LBB16_41:
-; RV64-NEXT:    vslidedown.vi v8, v28, 19
-; RV64-NEXT:  .LBB16_42:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 18
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 19(sp)
-; RV64-NEXT:    bnez a2, .LBB16_44
-; RV64-NEXT:  # %bb.43:
-; RV64-NEXT:    vslidedown.vi v8, v26, 18
-; RV64-NEXT:    j .LBB16_45
-; RV64-NEXT:  .LBB16_44:
-; RV64-NEXT:    vslidedown.vi v8, v28, 18
-; RV64-NEXT:  .LBB16_45:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 17
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 18(sp)
-; RV64-NEXT:    bnez a2, .LBB16_47
-; RV64-NEXT:  # %bb.46:
-; RV64-NEXT:    vslidedown.vi v8, v26, 17
-; RV64-NEXT:    j .LBB16_48
-; RV64-NEXT:  .LBB16_47:
-; RV64-NEXT:    vslidedown.vi v8, v28, 17
-; RV64-NEXT:  .LBB16_48:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 16
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 17(sp)
-; RV64-NEXT:    bnez a2, .LBB16_50
-; RV64-NEXT:  # %bb.49:
-; RV64-NEXT:    vslidedown.vi v8, v26, 16
-; RV64-NEXT:    j .LBB16_51
-; RV64-NEXT:  .LBB16_50:
-; RV64-NEXT:    vslidedown.vi v8, v28, 16
-; RV64-NEXT:  .LBB16_51:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 15
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 16(sp)
-; RV64-NEXT:    bnez a2, .LBB16_53
-; RV64-NEXT:  # %bb.52:
-; RV64-NEXT:    vslidedown.vi v8, v26, 15
-; RV64-NEXT:    j .LBB16_54
-; RV64-NEXT:  .LBB16_53:
-; RV64-NEXT:    vslidedown.vi v8, v28, 15
-; RV64-NEXT:  .LBB16_54:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 14
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 15(sp)
-; RV64-NEXT:    bnez a2, .LBB16_56
-; RV64-NEXT:  # %bb.55:
-; RV64-NEXT:    vslidedown.vi v8, v26, 14
-; RV64-NEXT:    j .LBB16_57
-; RV64-NEXT:  .LBB16_56:
-; RV64-NEXT:    vslidedown.vi v8, v28, 14
-; RV64-NEXT:  .LBB16_57:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 13
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 14(sp)
-; RV64-NEXT:    bnez a2, .LBB16_59
-; RV64-NEXT:  # %bb.58:
-; RV64-NEXT:    vslidedown.vi v8, v26, 13
-; RV64-NEXT:    j .LBB16_60
-; RV64-NEXT:  .LBB16_59:
-; RV64-NEXT:    vslidedown.vi v8, v28, 13
-; RV64-NEXT:  .LBB16_60:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 12
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 13(sp)
-; RV64-NEXT:    bnez a2, .LBB16_62
-; RV64-NEXT:  # %bb.61:
-; RV64-NEXT:    vslidedown.vi v8, v26, 12
-; RV64-NEXT:    j .LBB16_63
-; RV64-NEXT:  .LBB16_62:
-; RV64-NEXT:    vslidedown.vi v8, v28, 12
-; RV64-NEXT:  .LBB16_63:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 11
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 12(sp)
-; RV64-NEXT:    bnez a2, .LBB16_65
-; RV64-NEXT:  # %bb.64:
-; RV64-NEXT:    vslidedown.vi v8, v26, 11
-; RV64-NEXT:    j .LBB16_66
-; RV64-NEXT:  .LBB16_65:
-; RV64-NEXT:    vslidedown.vi v8, v28, 11
-; RV64-NEXT:  .LBB16_66:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 10
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 11(sp)
-; RV64-NEXT:    bnez a2, .LBB16_68
-; RV64-NEXT:  # %bb.67:
-; RV64-NEXT:    vslidedown.vi v8, v26, 10
-; RV64-NEXT:    j .LBB16_69
-; RV64-NEXT:  .LBB16_68:
-; RV64-NEXT:    vslidedown.vi v8, v28, 10
-; RV64-NEXT:  .LBB16_69:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 9
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 10(sp)
-; RV64-NEXT:    bnez a2, .LBB16_71
-; RV64-NEXT:  # %bb.70:
-; RV64-NEXT:    vslidedown.vi v8, v26, 9
-; RV64-NEXT:    j .LBB16_72
-; RV64-NEXT:  .LBB16_71:
-; RV64-NEXT:    vslidedown.vi v8, v28, 9
-; RV64-NEXT:  .LBB16_72:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 8
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 9(sp)
-; RV64-NEXT:    bnez a2, .LBB16_74
-; RV64-NEXT:  # %bb.73:
-; RV64-NEXT:    vslidedown.vi v8, v26, 8
-; RV64-NEXT:    j .LBB16_75
-; RV64-NEXT:  .LBB16_74:
-; RV64-NEXT:    vslidedown.vi v8, v28, 8
-; RV64-NEXT:  .LBB16_75:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 7
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 8(sp)
-; RV64-NEXT:    bnez a2, .LBB16_77
-; RV64-NEXT:  # %bb.76:
-; RV64-NEXT:    vslidedown.vi v8, v26, 7
-; RV64-NEXT:    j .LBB16_78
-; RV64-NEXT:  .LBB16_77:
-; RV64-NEXT:    vslidedown.vi v8, v28, 7
-; RV64-NEXT:  .LBB16_78:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 6
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 7(sp)
-; RV64-NEXT:    bnez a2, .LBB16_80
-; RV64-NEXT:  # %bb.79:
-; RV64-NEXT:    vslidedown.vi v8, v26, 6
-; RV64-NEXT:    j .LBB16_81
-; RV64-NEXT:  .LBB16_80:
-; RV64-NEXT:    vslidedown.vi v8, v28, 6
-; RV64-NEXT:  .LBB16_81:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 5
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 6(sp)
-; RV64-NEXT:    bnez a2, .LBB16_83
-; RV64-NEXT:  # %bb.82:
-; RV64-NEXT:    vslidedown.vi v8, v26, 5
-; RV64-NEXT:    j .LBB16_84
-; RV64-NEXT:  .LBB16_83:
-; RV64-NEXT:    vslidedown.vi v8, v28, 5
-; RV64-NEXT:  .LBB16_84:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 4
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 5(sp)
-; RV64-NEXT:    bnez a2, .LBB16_86
-; RV64-NEXT:  # %bb.85:
-; RV64-NEXT:    vslidedown.vi v8, v26, 4
-; RV64-NEXT:    j .LBB16_87
-; RV64-NEXT:  .LBB16_86:
-; RV64-NEXT:    vslidedown.vi v8, v28, 4
-; RV64-NEXT:  .LBB16_87:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 3
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 4(sp)
-; RV64-NEXT:    bnez a2, .LBB16_89
-; RV64-NEXT:  # %bb.88:
-; RV64-NEXT:    vslidedown.vi v8, v26, 3
-; RV64-NEXT:    j .LBB16_90
-; RV64-NEXT:  .LBB16_89:
-; RV64-NEXT:    vslidedown.vi v8, v28, 3
-; RV64-NEXT:  .LBB16_90:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v8, v30, 2
-; RV64-NEXT:    vmv.x.s a2, v8
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 3(sp)
-; RV64-NEXT:    bnez a2, .LBB16_92
-; RV64-NEXT:  # %bb.91:
-; RV64-NEXT:    vslidedown.vi v8, v26, 2
-; RV64-NEXT:    j .LBB16_93
-; RV64-NEXT:  .LBB16_92:
-; RV64-NEXT:    vslidedown.vi v8, v28, 2
-; RV64-NEXT:  .LBB16_93:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:    vslidedown.vi v30, v30, 1
-; RV64-NEXT:    vmv.x.s a2, v30
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 2(sp)
-; RV64-NEXT:    bnez a2, .LBB16_95
-; RV64-NEXT:  # %bb.94:
-; RV64-NEXT:    vslidedown.vi v26, v26, 1
-; RV64-NEXT:    j .LBB16_96
-; RV64-NEXT:  .LBB16_95:
-; RV64-NEXT:    vslidedown.vi v26, v28, 1
-; RV64-NEXT:  .LBB16_96:
-; RV64-NEXT:    vmv.x.s a1, v26
-; RV64-NEXT:    sb a1, 1(sp)
-; RV64-NEXT:    vsetvli zero, a0, e8,m2,ta,mu
-; RV64-NEXT:    vle8.v v26, (sp)
-; RV64-NEXT:    vand.vi v26, v26, 1
-; RV64-NEXT:    vmsne.vi v0, v26, 0
-; RV64-NEXT:    addi sp, s0, -64
-; RV64-NEXT:    ld s0, 48(sp) # 8-byte Folded Reload
-; RV64-NEXT:    ld ra, 56(sp) # 8-byte Folded Reload
-; RV64-NEXT:    addi sp, sp, 64
-; RV64-NEXT:    ret
+; CHECK-LABEL: vselect_v32i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    addi a0, zero, 32
+; CHECK-NEXT:    vsetvli zero, a0, e8,m2,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
   %v = select <32 x i1> %cc, <32 x i1> %a, <32 x i1> %b
   ret <32 x i1> %v
 }
 
 define <64 x i1> @vselect_v64i1(<64 x i1> %a, <64 x i1> %b, <64 x i1> %cc) {
-; RV32-LABEL: vselect_v64i1:
-; RV32:       # %bb.0:
-; RV32-NEXT:    addi sp, sp, -128
-; RV32-NEXT:    .cfi_def_cfa_offset 128
-; RV32-NEXT:    sw ra, 124(sp) # 4-byte Folded Spill
-; RV32-NEXT:    sw s0, 120(sp) # 4-byte Folded Spill
-; RV32-NEXT:    .cfi_offset ra, -4
-; RV32-NEXT:    .cfi_offset s0, -8
-; RV32-NEXT:    addi s0, sp, 128
-; RV32-NEXT:    .cfi_def_cfa s0, 0
-; RV32-NEXT:    andi sp, sp, -64
-; RV32-NEXT:    vmv1r.v v25, v0
-; RV32-NEXT:    addi a0, zero, 64
-; RV32-NEXT:    vsetvli zero, a0, e8,m4,ta,mu
-; RV32-NEXT:    vmv.v.i v16, 0
-; RV32-NEXT:    vmv1r.v v0, v8
-; RV32-NEXT:    vmerge.vim v28, v16, 1, v0
-; RV32-NEXT:    vmv1r.v v0, v9
-; RV32-NEXT:    vmerge.vim v12, v16, 1, v0
-; RV32-NEXT:    vmv.x.s a1, v12
-; RV32-NEXT:    andi a1, a1, 1
-; RV32-NEXT:    vmv1r.v v0, v25
-; RV32-NEXT:    vmerge.vim v8, v16, 1, v0
-; RV32-NEXT:    bnez a1, .LBB17_2
-; RV32-NEXT:  # %bb.1:
-; RV32-NEXT:    vmv.x.s a1, v28
-; RV32-NEXT:    j .LBB17_3
-; RV32-NEXT:  .LBB17_2:
-; RV32-NEXT:    vmv.x.s a1, v8
-; RV32-NEXT:  .LBB17_3:
-; RV32-NEXT:    sb a1, 0(sp)
-; RV32-NEXT:    addi a1, zero, 63
-; RV32-NEXT:    vsetivli zero, 1, e8,m4,ta,mu
-; RV32-NEXT:    vslidedown.vx v16, v12, a1
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    bnez a2, .LBB17_5
-; RV32-NEXT:  # %bb.4:
-; RV32-NEXT:    vslidedown.vx v16, v28, a1
-; RV32-NEXT:    j .LBB17_6
-; RV32-NEXT:  .LBB17_5:
-; RV32-NEXT:    vslidedown.vx v16, v8, a1
-; RV32-NEXT:  .LBB17_6:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 62
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 63(sp)
-; RV32-NEXT:    bnez a3, .LBB17_8
-; RV32-NEXT:  # %bb.7:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_9
-; RV32-NEXT:  .LBB17_8:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_9:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 61
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 62(sp)
-; RV32-NEXT:    bnez a3, .LBB17_11
-; RV32-NEXT:  # %bb.10:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_12
-; RV32-NEXT:  .LBB17_11:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_12:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 60
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 61(sp)
-; RV32-NEXT:    bnez a3, .LBB17_14
-; RV32-NEXT:  # %bb.13:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_15
-; RV32-NEXT:  .LBB17_14:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_15:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 59
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 60(sp)
-; RV32-NEXT:    bnez a3, .LBB17_17
-; RV32-NEXT:  # %bb.16:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_18
-; RV32-NEXT:  .LBB17_17:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_18:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 58
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 59(sp)
-; RV32-NEXT:    bnez a3, .LBB17_20
-; RV32-NEXT:  # %bb.19:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_21
-; RV32-NEXT:  .LBB17_20:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_21:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 57
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 58(sp)
-; RV32-NEXT:    bnez a3, .LBB17_23
-; RV32-NEXT:  # %bb.22:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_24
-; RV32-NEXT:  .LBB17_23:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_24:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 56
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 57(sp)
-; RV32-NEXT:    bnez a3, .LBB17_26
-; RV32-NEXT:  # %bb.25:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_27
-; RV32-NEXT:  .LBB17_26:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_27:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 55
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 56(sp)
-; RV32-NEXT:    bnez a3, .LBB17_29
-; RV32-NEXT:  # %bb.28:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_30
-; RV32-NEXT:  .LBB17_29:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_30:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 54
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 55(sp)
-; RV32-NEXT:    bnez a3, .LBB17_32
-; RV32-NEXT:  # %bb.31:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_33
-; RV32-NEXT:  .LBB17_32:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_33:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 53
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 54(sp)
-; RV32-NEXT:    bnez a3, .LBB17_35
-; RV32-NEXT:  # %bb.34:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_36
-; RV32-NEXT:  .LBB17_35:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_36:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 52
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 53(sp)
-; RV32-NEXT:    bnez a3, .LBB17_38
-; RV32-NEXT:  # %bb.37:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_39
-; RV32-NEXT:  .LBB17_38:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_39:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 51
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 52(sp)
-; RV32-NEXT:    bnez a3, .LBB17_41
-; RV32-NEXT:  # %bb.40:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_42
-; RV32-NEXT:  .LBB17_41:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_42:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 50
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 51(sp)
-; RV32-NEXT:    bnez a3, .LBB17_44
-; RV32-NEXT:  # %bb.43:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_45
-; RV32-NEXT:  .LBB17_44:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_45:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 49
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 50(sp)
-; RV32-NEXT:    bnez a3, .LBB17_47
-; RV32-NEXT:  # %bb.46:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_48
-; RV32-NEXT:  .LBB17_47:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_48:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 48
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 49(sp)
-; RV32-NEXT:    bnez a3, .LBB17_50
-; RV32-NEXT:  # %bb.49:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_51
-; RV32-NEXT:  .LBB17_50:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_51:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 47
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 48(sp)
-; RV32-NEXT:    bnez a3, .LBB17_53
-; RV32-NEXT:  # %bb.52:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_54
-; RV32-NEXT:  .LBB17_53:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_54:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 46
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 47(sp)
-; RV32-NEXT:    bnez a3, .LBB17_56
-; RV32-NEXT:  # %bb.55:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_57
-; RV32-NEXT:  .LBB17_56:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_57:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 45
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 46(sp)
-; RV32-NEXT:    bnez a3, .LBB17_59
-; RV32-NEXT:  # %bb.58:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_60
-; RV32-NEXT:  .LBB17_59:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_60:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 44
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 45(sp)
-; RV32-NEXT:    bnez a3, .LBB17_62
-; RV32-NEXT:  # %bb.61:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_63
-; RV32-NEXT:  .LBB17_62:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_63:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 43
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 44(sp)
-; RV32-NEXT:    bnez a3, .LBB17_65
-; RV32-NEXT:  # %bb.64:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_66
-; RV32-NEXT:  .LBB17_65:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_66:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 42
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 43(sp)
-; RV32-NEXT:    bnez a3, .LBB17_68
-; RV32-NEXT:  # %bb.67:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_69
-; RV32-NEXT:  .LBB17_68:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_69:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 41
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 42(sp)
-; RV32-NEXT:    bnez a3, .LBB17_71
-; RV32-NEXT:  # %bb.70:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_72
-; RV32-NEXT:  .LBB17_71:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_72:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 40
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 41(sp)
-; RV32-NEXT:    bnez a3, .LBB17_74
-; RV32-NEXT:  # %bb.73:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_75
-; RV32-NEXT:  .LBB17_74:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_75:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 39
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 40(sp)
-; RV32-NEXT:    bnez a3, .LBB17_77
-; RV32-NEXT:  # %bb.76:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_78
-; RV32-NEXT:  .LBB17_77:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_78:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 38
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 39(sp)
-; RV32-NEXT:    bnez a3, .LBB17_80
-; RV32-NEXT:  # %bb.79:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_81
-; RV32-NEXT:  .LBB17_80:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_81:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 37
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 38(sp)
-; RV32-NEXT:    bnez a3, .LBB17_83
-; RV32-NEXT:  # %bb.82:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_84
-; RV32-NEXT:  .LBB17_83:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_84:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 36
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 37(sp)
-; RV32-NEXT:    bnez a3, .LBB17_86
-; RV32-NEXT:  # %bb.85:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_87
-; RV32-NEXT:  .LBB17_86:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_87:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 35
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 36(sp)
-; RV32-NEXT:    bnez a3, .LBB17_89
-; RV32-NEXT:  # %bb.88:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_90
-; RV32-NEXT:  .LBB17_89:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_90:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 34
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 35(sp)
-; RV32-NEXT:    bnez a3, .LBB17_92
-; RV32-NEXT:  # %bb.91:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_93
-; RV32-NEXT:  .LBB17_92:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_93:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 33
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 34(sp)
-; RV32-NEXT:    bnez a3, .LBB17_95
-; RV32-NEXT:  # %bb.94:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_96
-; RV32-NEXT:  .LBB17_95:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_96:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    addi a2, zero, 32
-; RV32-NEXT:    vslidedown.vx v16, v12, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    andi a3, a3, 1
-; RV32-NEXT:    sb a1, 33(sp)
-; RV32-NEXT:    bnez a3, .LBB17_98
-; RV32-NEXT:  # %bb.97:
-; RV32-NEXT:    vslidedown.vx v16, v28, a2
-; RV32-NEXT:    j .LBB17_99
-; RV32-NEXT:  .LBB17_98:
-; RV32-NEXT:    vslidedown.vx v16, v8, a2
-; RV32-NEXT:  .LBB17_99:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 31
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 32(sp)
-; RV32-NEXT:    bnez a2, .LBB17_101
-; RV32-NEXT:  # %bb.100:
-; RV32-NEXT:    vslidedown.vi v16, v28, 31
-; RV32-NEXT:    j .LBB17_102
-; RV32-NEXT:  .LBB17_101:
-; RV32-NEXT:    vslidedown.vi v16, v8, 31
-; RV32-NEXT:  .LBB17_102:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 30
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 31(sp)
-; RV32-NEXT:    bnez a2, .LBB17_104
-; RV32-NEXT:  # %bb.103:
-; RV32-NEXT:    vslidedown.vi v16, v28, 30
-; RV32-NEXT:    j .LBB17_105
-; RV32-NEXT:  .LBB17_104:
-; RV32-NEXT:    vslidedown.vi v16, v8, 30
-; RV32-NEXT:  .LBB17_105:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 29
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 30(sp)
-; RV32-NEXT:    bnez a2, .LBB17_107
-; RV32-NEXT:  # %bb.106:
-; RV32-NEXT:    vslidedown.vi v16, v28, 29
-; RV32-NEXT:    j .LBB17_108
-; RV32-NEXT:  .LBB17_107:
-; RV32-NEXT:    vslidedown.vi v16, v8, 29
-; RV32-NEXT:  .LBB17_108:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 28
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 29(sp)
-; RV32-NEXT:    bnez a2, .LBB17_110
-; RV32-NEXT:  # %bb.109:
-; RV32-NEXT:    vslidedown.vi v16, v28, 28
-; RV32-NEXT:    j .LBB17_111
-; RV32-NEXT:  .LBB17_110:
-; RV32-NEXT:    vslidedown.vi v16, v8, 28
-; RV32-NEXT:  .LBB17_111:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 27
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 28(sp)
-; RV32-NEXT:    bnez a2, .LBB17_113
-; RV32-NEXT:  # %bb.112:
-; RV32-NEXT:    vslidedown.vi v16, v28, 27
-; RV32-NEXT:    j .LBB17_114
-; RV32-NEXT:  .LBB17_113:
-; RV32-NEXT:    vslidedown.vi v16, v8, 27
-; RV32-NEXT:  .LBB17_114:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 26
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 27(sp)
-; RV32-NEXT:    bnez a2, .LBB17_116
-; RV32-NEXT:  # %bb.115:
-; RV32-NEXT:    vslidedown.vi v16, v28, 26
-; RV32-NEXT:    j .LBB17_117
-; RV32-NEXT:  .LBB17_116:
-; RV32-NEXT:    vslidedown.vi v16, v8, 26
-; RV32-NEXT:  .LBB17_117:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 25
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 26(sp)
-; RV32-NEXT:    bnez a2, .LBB17_119
-; RV32-NEXT:  # %bb.118:
-; RV32-NEXT:    vslidedown.vi v16, v28, 25
-; RV32-NEXT:    j .LBB17_120
-; RV32-NEXT:  .LBB17_119:
-; RV32-NEXT:    vslidedown.vi v16, v8, 25
-; RV32-NEXT:  .LBB17_120:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 24
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 25(sp)
-; RV32-NEXT:    bnez a2, .LBB17_122
-; RV32-NEXT:  # %bb.121:
-; RV32-NEXT:    vslidedown.vi v16, v28, 24
-; RV32-NEXT:    j .LBB17_123
-; RV32-NEXT:  .LBB17_122:
-; RV32-NEXT:    vslidedown.vi v16, v8, 24
-; RV32-NEXT:  .LBB17_123:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 23
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 24(sp)
-; RV32-NEXT:    bnez a2, .LBB17_125
-; RV32-NEXT:  # %bb.124:
-; RV32-NEXT:    vslidedown.vi v16, v28, 23
-; RV32-NEXT:    j .LBB17_126
-; RV32-NEXT:  .LBB17_125:
-; RV32-NEXT:    vslidedown.vi v16, v8, 23
-; RV32-NEXT:  .LBB17_126:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 22
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 23(sp)
-; RV32-NEXT:    bnez a2, .LBB17_128
-; RV32-NEXT:  # %bb.127:
-; RV32-NEXT:    vslidedown.vi v16, v28, 22
-; RV32-NEXT:    j .LBB17_129
-; RV32-NEXT:  .LBB17_128:
-; RV32-NEXT:    vslidedown.vi v16, v8, 22
-; RV32-NEXT:  .LBB17_129:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 21
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 22(sp)
-; RV32-NEXT:    bnez a2, .LBB17_131
-; RV32-NEXT:  # %bb.130:
-; RV32-NEXT:    vslidedown.vi v16, v28, 21
-; RV32-NEXT:    j .LBB17_132
-; RV32-NEXT:  .LBB17_131:
-; RV32-NEXT:    vslidedown.vi v16, v8, 21
-; RV32-NEXT:  .LBB17_132:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 20
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 21(sp)
-; RV32-NEXT:    bnez a2, .LBB17_134
-; RV32-NEXT:  # %bb.133:
-; RV32-NEXT:    vslidedown.vi v16, v28, 20
-; RV32-NEXT:    j .LBB17_135
-; RV32-NEXT:  .LBB17_134:
-; RV32-NEXT:    vslidedown.vi v16, v8, 20
-; RV32-NEXT:  .LBB17_135:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 19
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 20(sp)
-; RV32-NEXT:    bnez a2, .LBB17_137
-; RV32-NEXT:  # %bb.136:
-; RV32-NEXT:    vslidedown.vi v16, v28, 19
-; RV32-NEXT:    j .LBB17_138
-; RV32-NEXT:  .LBB17_137:
-; RV32-NEXT:    vslidedown.vi v16, v8, 19
-; RV32-NEXT:  .LBB17_138:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 18
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 19(sp)
-; RV32-NEXT:    bnez a2, .LBB17_140
-; RV32-NEXT:  # %bb.139:
-; RV32-NEXT:    vslidedown.vi v16, v28, 18
-; RV32-NEXT:    j .LBB17_141
-; RV32-NEXT:  .LBB17_140:
-; RV32-NEXT:    vslidedown.vi v16, v8, 18
-; RV32-NEXT:  .LBB17_141:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 17
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 18(sp)
-; RV32-NEXT:    bnez a2, .LBB17_143
-; RV32-NEXT:  # %bb.142:
-; RV32-NEXT:    vslidedown.vi v16, v28, 17
-; RV32-NEXT:    j .LBB17_144
-; RV32-NEXT:  .LBB17_143:
-; RV32-NEXT:    vslidedown.vi v16, v8, 17
-; RV32-NEXT:  .LBB17_144:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 16
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 17(sp)
-; RV32-NEXT:    bnez a2, .LBB17_146
-; RV32-NEXT:  # %bb.145:
-; RV32-NEXT:    vslidedown.vi v16, v28, 16
-; RV32-NEXT:    j .LBB17_147
-; RV32-NEXT:  .LBB17_146:
-; RV32-NEXT:    vslidedown.vi v16, v8, 16
-; RV32-NEXT:  .LBB17_147:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 15
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 16(sp)
-; RV32-NEXT:    bnez a2, .LBB17_149
-; RV32-NEXT:  # %bb.148:
-; RV32-NEXT:    vslidedown.vi v16, v28, 15
-; RV32-NEXT:    j .LBB17_150
-; RV32-NEXT:  .LBB17_149:
-; RV32-NEXT:    vslidedown.vi v16, v8, 15
-; RV32-NEXT:  .LBB17_150:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 14
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 15(sp)
-; RV32-NEXT:    bnez a2, .LBB17_152
-; RV32-NEXT:  # %bb.151:
-; RV32-NEXT:    vslidedown.vi v16, v28, 14
-; RV32-NEXT:    j .LBB17_153
-; RV32-NEXT:  .LBB17_152:
-; RV32-NEXT:    vslidedown.vi v16, v8, 14
-; RV32-NEXT:  .LBB17_153:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 13
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 14(sp)
-; RV32-NEXT:    bnez a2, .LBB17_155
-; RV32-NEXT:  # %bb.154:
-; RV32-NEXT:    vslidedown.vi v16, v28, 13
-; RV32-NEXT:    j .LBB17_156
-; RV32-NEXT:  .LBB17_155:
-; RV32-NEXT:    vslidedown.vi v16, v8, 13
-; RV32-NEXT:  .LBB17_156:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 12
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 13(sp)
-; RV32-NEXT:    bnez a2, .LBB17_158
-; RV32-NEXT:  # %bb.157:
-; RV32-NEXT:    vslidedown.vi v16, v28, 12
-; RV32-NEXT:    j .LBB17_159
-; RV32-NEXT:  .LBB17_158:
-; RV32-NEXT:    vslidedown.vi v16, v8, 12
-; RV32-NEXT:  .LBB17_159:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 11
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 12(sp)
-; RV32-NEXT:    bnez a2, .LBB17_161
-; RV32-NEXT:  # %bb.160:
-; RV32-NEXT:    vslidedown.vi v16, v28, 11
-; RV32-NEXT:    j .LBB17_162
-; RV32-NEXT:  .LBB17_161:
-; RV32-NEXT:    vslidedown.vi v16, v8, 11
-; RV32-NEXT:  .LBB17_162:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 10
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 11(sp)
-; RV32-NEXT:    bnez a2, .LBB17_164
-; RV32-NEXT:  # %bb.163:
-; RV32-NEXT:    vslidedown.vi v16, v28, 10
-; RV32-NEXT:    j .LBB17_165
-; RV32-NEXT:  .LBB17_164:
-; RV32-NEXT:    vslidedown.vi v16, v8, 10
-; RV32-NEXT:  .LBB17_165:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 9
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 10(sp)
-; RV32-NEXT:    bnez a2, .LBB17_167
-; RV32-NEXT:  # %bb.166:
-; RV32-NEXT:    vslidedown.vi v16, v28, 9
-; RV32-NEXT:    j .LBB17_168
-; RV32-NEXT:  .LBB17_167:
-; RV32-NEXT:    vslidedown.vi v16, v8, 9
-; RV32-NEXT:  .LBB17_168:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 8
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 9(sp)
-; RV32-NEXT:    bnez a2, .LBB17_170
-; RV32-NEXT:  # %bb.169:
-; RV32-NEXT:    vslidedown.vi v16, v28, 8
-; RV32-NEXT:    j .LBB17_171
-; RV32-NEXT:  .LBB17_170:
-; RV32-NEXT:    vslidedown.vi v16, v8, 8
-; RV32-NEXT:  .LBB17_171:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 7
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 8(sp)
-; RV32-NEXT:    bnez a2, .LBB17_173
-; RV32-NEXT:  # %bb.172:
-; RV32-NEXT:    vslidedown.vi v16, v28, 7
-; RV32-NEXT:    j .LBB17_174
-; RV32-NEXT:  .LBB17_173:
-; RV32-NEXT:    vslidedown.vi v16, v8, 7
-; RV32-NEXT:  .LBB17_174:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 6
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 7(sp)
-; RV32-NEXT:    bnez a2, .LBB17_176
-; RV32-NEXT:  # %bb.175:
-; RV32-NEXT:    vslidedown.vi v16, v28, 6
-; RV32-NEXT:    j .LBB17_177
-; RV32-NEXT:  .LBB17_176:
-; RV32-NEXT:    vslidedown.vi v16, v8, 6
-; RV32-NEXT:  .LBB17_177:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 5
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 6(sp)
-; RV32-NEXT:    bnez a2, .LBB17_179
-; RV32-NEXT:  # %bb.178:
-; RV32-NEXT:    vslidedown.vi v16, v28, 5
-; RV32-NEXT:    j .LBB17_180
-; RV32-NEXT:  .LBB17_179:
-; RV32-NEXT:    vslidedown.vi v16, v8, 5
-; RV32-NEXT:  .LBB17_180:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 4
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 5(sp)
-; RV32-NEXT:    bnez a2, .LBB17_182
-; RV32-NEXT:  # %bb.181:
-; RV32-NEXT:    vslidedown.vi v16, v28, 4
-; RV32-NEXT:    j .LBB17_183
-; RV32-NEXT:  .LBB17_182:
-; RV32-NEXT:    vslidedown.vi v16, v8, 4
-; RV32-NEXT:  .LBB17_183:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 3
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 4(sp)
-; RV32-NEXT:    bnez a2, .LBB17_185
-; RV32-NEXT:  # %bb.184:
-; RV32-NEXT:    vslidedown.vi v16, v28, 3
-; RV32-NEXT:    j .LBB17_186
-; RV32-NEXT:  .LBB17_185:
-; RV32-NEXT:    vslidedown.vi v16, v8, 3
-; RV32-NEXT:  .LBB17_186:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v16, v12, 2
-; RV32-NEXT:    vmv.x.s a2, v16
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 3(sp)
-; RV32-NEXT:    bnez a2, .LBB17_188
-; RV32-NEXT:  # %bb.187:
-; RV32-NEXT:    vslidedown.vi v16, v28, 2
-; RV32-NEXT:    j .LBB17_189
-; RV32-NEXT:  .LBB17_188:
-; RV32-NEXT:    vslidedown.vi v16, v8, 2
-; RV32-NEXT:  .LBB17_189:
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    vslidedown.vi v12, v12, 1
-; RV32-NEXT:    vmv.x.s a2, v12
-; RV32-NEXT:    andi a2, a2, 1
-; RV32-NEXT:    sb a1, 2(sp)
-; RV32-NEXT:    bnez a2, .LBB17_191
-; RV32-NEXT:  # %bb.190:
-; RV32-NEXT:    vslidedown.vi v28, v28, 1
-; RV32-NEXT:    j .LBB17_192
-; RV32-NEXT:  .LBB17_191:
-; RV32-NEXT:    vslidedown.vi v28, v8, 1
-; RV32-NEXT:  .LBB17_192:
-; RV32-NEXT:    vmv.x.s a1, v28
-; RV32-NEXT:    sb a1, 1(sp)
-; RV32-NEXT:    vsetvli zero, a0, e8,m4,ta,mu
-; RV32-NEXT:    vle8.v v28, (sp)
-; RV32-NEXT:    vand.vi v28, v28, 1
-; RV32-NEXT:    vmsne.vi v0, v28, 0
-; RV32-NEXT:    addi sp, s0, -128
-; RV32-NEXT:    lw s0, 120(sp) # 4-byte Folded Reload
-; RV32-NEXT:    lw ra, 124(sp) # 4-byte Folded Reload
-; RV32-NEXT:    addi sp, sp, 128
-; RV32-NEXT:    ret
-;
-; RV64-LABEL: vselect_v64i1:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -128
-; RV64-NEXT:    .cfi_def_cfa_offset 128
-; RV64-NEXT:    sd ra, 120(sp) # 8-byte Folded Spill
-; RV64-NEXT:    sd s0, 112(sp) # 8-byte Folded Spill
-; RV64-NEXT:    .cfi_offset ra, -8
-; RV64-NEXT:    .cfi_offset s0, -16
-; RV64-NEXT:    addi s0, sp, 128
-; RV64-NEXT:    .cfi_def_cfa s0, 0
-; RV64-NEXT:    andi sp, sp, -64
-; RV64-NEXT:    vmv1r.v v25, v0
-; RV64-NEXT:    addi a0, zero, 64
-; RV64-NEXT:    vsetvli zero, a0, e8,m4,ta,mu
-; RV64-NEXT:    vmv.v.i v16, 0
-; RV64-NEXT:    vmv1r.v v0, v8
-; RV64-NEXT:    vmerge.vim v28, v16, 1, v0
-; RV64-NEXT:    vmv1r.v v0, v9
-; RV64-NEXT:    vmerge.vim v12, v16, 1, v0
-; RV64-NEXT:    vmv.x.s a1, v12
-; RV64-NEXT:    andi a1, a1, 1
-; RV64-NEXT:    vmv1r.v v0, v25
-; RV64-NEXT:    vmerge.vim v8, v16, 1, v0
-; RV64-NEXT:    bnez a1, .LBB17_2
-; RV64-NEXT:  # %bb.1:
-; RV64-NEXT:    vmv.x.s a1, v28
-; RV64-NEXT:    j .LBB17_3
-; RV64-NEXT:  .LBB17_2:
-; RV64-NEXT:    vmv.x.s a1, v8
-; RV64-NEXT:  .LBB17_3:
-; RV64-NEXT:    sb a1, 0(sp)
-; RV64-NEXT:    addi a1, zero, 63
-; RV64-NEXT:    vsetivli zero, 1, e8,m4,ta,mu
-; RV64-NEXT:    vslidedown.vx v16, v12, a1
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    bnez a2, .LBB17_5
-; RV64-NEXT:  # %bb.4:
-; RV64-NEXT:    vslidedown.vx v16, v28, a1
-; RV64-NEXT:    j .LBB17_6
-; RV64-NEXT:  .LBB17_5:
-; RV64-NEXT:    vslidedown.vx v16, v8, a1
-; RV64-NEXT:  .LBB17_6:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 62
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 63(sp)
-; RV64-NEXT:    bnez a3, .LBB17_8
-; RV64-NEXT:  # %bb.7:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_9
-; RV64-NEXT:  .LBB17_8:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_9:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 61
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 62(sp)
-; RV64-NEXT:    bnez a3, .LBB17_11
-; RV64-NEXT:  # %bb.10:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_12
-; RV64-NEXT:  .LBB17_11:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_12:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 60
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 61(sp)
-; RV64-NEXT:    bnez a3, .LBB17_14
-; RV64-NEXT:  # %bb.13:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_15
-; RV64-NEXT:  .LBB17_14:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_15:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 59
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 60(sp)
-; RV64-NEXT:    bnez a3, .LBB17_17
-; RV64-NEXT:  # %bb.16:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_18
-; RV64-NEXT:  .LBB17_17:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_18:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 58
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 59(sp)
-; RV64-NEXT:    bnez a3, .LBB17_20
-; RV64-NEXT:  # %bb.19:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_21
-; RV64-NEXT:  .LBB17_20:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_21:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 57
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 58(sp)
-; RV64-NEXT:    bnez a3, .LBB17_23
-; RV64-NEXT:  # %bb.22:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_24
-; RV64-NEXT:  .LBB17_23:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_24:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 56
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 57(sp)
-; RV64-NEXT:    bnez a3, .LBB17_26
-; RV64-NEXT:  # %bb.25:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_27
-; RV64-NEXT:  .LBB17_26:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_27:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 55
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 56(sp)
-; RV64-NEXT:    bnez a3, .LBB17_29
-; RV64-NEXT:  # %bb.28:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_30
-; RV64-NEXT:  .LBB17_29:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_30:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 54
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 55(sp)
-; RV64-NEXT:    bnez a3, .LBB17_32
-; RV64-NEXT:  # %bb.31:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_33
-; RV64-NEXT:  .LBB17_32:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_33:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 53
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 54(sp)
-; RV64-NEXT:    bnez a3, .LBB17_35
-; RV64-NEXT:  # %bb.34:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_36
-; RV64-NEXT:  .LBB17_35:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_36:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 52
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 53(sp)
-; RV64-NEXT:    bnez a3, .LBB17_38
-; RV64-NEXT:  # %bb.37:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_39
-; RV64-NEXT:  .LBB17_38:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_39:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 51
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 52(sp)
-; RV64-NEXT:    bnez a3, .LBB17_41
-; RV64-NEXT:  # %bb.40:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_42
-; RV64-NEXT:  .LBB17_41:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_42:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 50
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 51(sp)
-; RV64-NEXT:    bnez a3, .LBB17_44
-; RV64-NEXT:  # %bb.43:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_45
-; RV64-NEXT:  .LBB17_44:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_45:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 49
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 50(sp)
-; RV64-NEXT:    bnez a3, .LBB17_47
-; RV64-NEXT:  # %bb.46:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_48
-; RV64-NEXT:  .LBB17_47:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_48:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 48
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 49(sp)
-; RV64-NEXT:    bnez a3, .LBB17_50
-; RV64-NEXT:  # %bb.49:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_51
-; RV64-NEXT:  .LBB17_50:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_51:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 47
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 48(sp)
-; RV64-NEXT:    bnez a3, .LBB17_53
-; RV64-NEXT:  # %bb.52:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_54
-; RV64-NEXT:  .LBB17_53:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_54:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 46
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 47(sp)
-; RV64-NEXT:    bnez a3, .LBB17_56
-; RV64-NEXT:  # %bb.55:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_57
-; RV64-NEXT:  .LBB17_56:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_57:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 45
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 46(sp)
-; RV64-NEXT:    bnez a3, .LBB17_59
-; RV64-NEXT:  # %bb.58:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_60
-; RV64-NEXT:  .LBB17_59:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_60:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 44
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 45(sp)
-; RV64-NEXT:    bnez a3, .LBB17_62
-; RV64-NEXT:  # %bb.61:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_63
-; RV64-NEXT:  .LBB17_62:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_63:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 43
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 44(sp)
-; RV64-NEXT:    bnez a3, .LBB17_65
-; RV64-NEXT:  # %bb.64:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_66
-; RV64-NEXT:  .LBB17_65:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_66:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 42
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 43(sp)
-; RV64-NEXT:    bnez a3, .LBB17_68
-; RV64-NEXT:  # %bb.67:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_69
-; RV64-NEXT:  .LBB17_68:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_69:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 41
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 42(sp)
-; RV64-NEXT:    bnez a3, .LBB17_71
-; RV64-NEXT:  # %bb.70:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_72
-; RV64-NEXT:  .LBB17_71:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_72:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 40
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 41(sp)
-; RV64-NEXT:    bnez a3, .LBB17_74
-; RV64-NEXT:  # %bb.73:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_75
-; RV64-NEXT:  .LBB17_74:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_75:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 39
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 40(sp)
-; RV64-NEXT:    bnez a3, .LBB17_77
-; RV64-NEXT:  # %bb.76:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_78
-; RV64-NEXT:  .LBB17_77:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_78:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 38
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 39(sp)
-; RV64-NEXT:    bnez a3, .LBB17_80
-; RV64-NEXT:  # %bb.79:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_81
-; RV64-NEXT:  .LBB17_80:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_81:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 37
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 38(sp)
-; RV64-NEXT:    bnez a3, .LBB17_83
-; RV64-NEXT:  # %bb.82:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_84
-; RV64-NEXT:  .LBB17_83:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_84:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 36
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 37(sp)
-; RV64-NEXT:    bnez a3, .LBB17_86
-; RV64-NEXT:  # %bb.85:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_87
-; RV64-NEXT:  .LBB17_86:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_87:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 35
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 36(sp)
-; RV64-NEXT:    bnez a3, .LBB17_89
-; RV64-NEXT:  # %bb.88:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_90
-; RV64-NEXT:  .LBB17_89:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_90:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 34
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 35(sp)
-; RV64-NEXT:    bnez a3, .LBB17_92
-; RV64-NEXT:  # %bb.91:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_93
-; RV64-NEXT:  .LBB17_92:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_93:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 33
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 34(sp)
-; RV64-NEXT:    bnez a3, .LBB17_95
-; RV64-NEXT:  # %bb.94:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_96
-; RV64-NEXT:  .LBB17_95:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_96:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    addi a2, zero, 32
-; RV64-NEXT:    vslidedown.vx v16, v12, a2
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    andi a3, a3, 1
-; RV64-NEXT:    sb a1, 33(sp)
-; RV64-NEXT:    bnez a3, .LBB17_98
-; RV64-NEXT:  # %bb.97:
-; RV64-NEXT:    vslidedown.vx v16, v28, a2
-; RV64-NEXT:    j .LBB17_99
-; RV64-NEXT:  .LBB17_98:
-; RV64-NEXT:    vslidedown.vx v16, v8, a2
-; RV64-NEXT:  .LBB17_99:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 31
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 32(sp)
-; RV64-NEXT:    bnez a2, .LBB17_101
-; RV64-NEXT:  # %bb.100:
-; RV64-NEXT:    vslidedown.vi v16, v28, 31
-; RV64-NEXT:    j .LBB17_102
-; RV64-NEXT:  .LBB17_101:
-; RV64-NEXT:    vslidedown.vi v16, v8, 31
-; RV64-NEXT:  .LBB17_102:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 30
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 31(sp)
-; RV64-NEXT:    bnez a2, .LBB17_104
-; RV64-NEXT:  # %bb.103:
-; RV64-NEXT:    vslidedown.vi v16, v28, 30
-; RV64-NEXT:    j .LBB17_105
-; RV64-NEXT:  .LBB17_104:
-; RV64-NEXT:    vslidedown.vi v16, v8, 30
-; RV64-NEXT:  .LBB17_105:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 29
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 30(sp)
-; RV64-NEXT:    bnez a2, .LBB17_107
-; RV64-NEXT:  # %bb.106:
-; RV64-NEXT:    vslidedown.vi v16, v28, 29
-; RV64-NEXT:    j .LBB17_108
-; RV64-NEXT:  .LBB17_107:
-; RV64-NEXT:    vslidedown.vi v16, v8, 29
-; RV64-NEXT:  .LBB17_108:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 28
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 29(sp)
-; RV64-NEXT:    bnez a2, .LBB17_110
-; RV64-NEXT:  # %bb.109:
-; RV64-NEXT:    vslidedown.vi v16, v28, 28
-; RV64-NEXT:    j .LBB17_111
-; RV64-NEXT:  .LBB17_110:
-; RV64-NEXT:    vslidedown.vi v16, v8, 28
-; RV64-NEXT:  .LBB17_111:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 27
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 28(sp)
-; RV64-NEXT:    bnez a2, .LBB17_113
-; RV64-NEXT:  # %bb.112:
-; RV64-NEXT:    vslidedown.vi v16, v28, 27
-; RV64-NEXT:    j .LBB17_114
-; RV64-NEXT:  .LBB17_113:
-; RV64-NEXT:    vslidedown.vi v16, v8, 27
-; RV64-NEXT:  .LBB17_114:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 26
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 27(sp)
-; RV64-NEXT:    bnez a2, .LBB17_116
-; RV64-NEXT:  # %bb.115:
-; RV64-NEXT:    vslidedown.vi v16, v28, 26
-; RV64-NEXT:    j .LBB17_117
-; RV64-NEXT:  .LBB17_116:
-; RV64-NEXT:    vslidedown.vi v16, v8, 26
-; RV64-NEXT:  .LBB17_117:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 25
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 26(sp)
-; RV64-NEXT:    bnez a2, .LBB17_119
-; RV64-NEXT:  # %bb.118:
-; RV64-NEXT:    vslidedown.vi v16, v28, 25
-; RV64-NEXT:    j .LBB17_120
-; RV64-NEXT:  .LBB17_119:
-; RV64-NEXT:    vslidedown.vi v16, v8, 25
-; RV64-NEXT:  .LBB17_120:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 24
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 25(sp)
-; RV64-NEXT:    bnez a2, .LBB17_122
-; RV64-NEXT:  # %bb.121:
-; RV64-NEXT:    vslidedown.vi v16, v28, 24
-; RV64-NEXT:    j .LBB17_123
-; RV64-NEXT:  .LBB17_122:
-; RV64-NEXT:    vslidedown.vi v16, v8, 24
-; RV64-NEXT:  .LBB17_123:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 23
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 24(sp)
-; RV64-NEXT:    bnez a2, .LBB17_125
-; RV64-NEXT:  # %bb.124:
-; RV64-NEXT:    vslidedown.vi v16, v28, 23
-; RV64-NEXT:    j .LBB17_126
-; RV64-NEXT:  .LBB17_125:
-; RV64-NEXT:    vslidedown.vi v16, v8, 23
-; RV64-NEXT:  .LBB17_126:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 22
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 23(sp)
-; RV64-NEXT:    bnez a2, .LBB17_128
-; RV64-NEXT:  # %bb.127:
-; RV64-NEXT:    vslidedown.vi v16, v28, 22
-; RV64-NEXT:    j .LBB17_129
-; RV64-NEXT:  .LBB17_128:
-; RV64-NEXT:    vslidedown.vi v16, v8, 22
-; RV64-NEXT:  .LBB17_129:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 21
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 22(sp)
-; RV64-NEXT:    bnez a2, .LBB17_131
-; RV64-NEXT:  # %bb.130:
-; RV64-NEXT:    vslidedown.vi v16, v28, 21
-; RV64-NEXT:    j .LBB17_132
-; RV64-NEXT:  .LBB17_131:
-; RV64-NEXT:    vslidedown.vi v16, v8, 21
-; RV64-NEXT:  .LBB17_132:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 20
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 21(sp)
-; RV64-NEXT:    bnez a2, .LBB17_134
-; RV64-NEXT:  # %bb.133:
-; RV64-NEXT:    vslidedown.vi v16, v28, 20
-; RV64-NEXT:    j .LBB17_135
-; RV64-NEXT:  .LBB17_134:
-; RV64-NEXT:    vslidedown.vi v16, v8, 20
-; RV64-NEXT:  .LBB17_135:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 19
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 20(sp)
-; RV64-NEXT:    bnez a2, .LBB17_137
-; RV64-NEXT:  # %bb.136:
-; RV64-NEXT:    vslidedown.vi v16, v28, 19
-; RV64-NEXT:    j .LBB17_138
-; RV64-NEXT:  .LBB17_137:
-; RV64-NEXT:    vslidedown.vi v16, v8, 19
-; RV64-NEXT:  .LBB17_138:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 18
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 19(sp)
-; RV64-NEXT:    bnez a2, .LBB17_140
-; RV64-NEXT:  # %bb.139:
-; RV64-NEXT:    vslidedown.vi v16, v28, 18
-; RV64-NEXT:    j .LBB17_141
-; RV64-NEXT:  .LBB17_140:
-; RV64-NEXT:    vslidedown.vi v16, v8, 18
-; RV64-NEXT:  .LBB17_141:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 17
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 18(sp)
-; RV64-NEXT:    bnez a2, .LBB17_143
-; RV64-NEXT:  # %bb.142:
-; RV64-NEXT:    vslidedown.vi v16, v28, 17
-; RV64-NEXT:    j .LBB17_144
-; RV64-NEXT:  .LBB17_143:
-; RV64-NEXT:    vslidedown.vi v16, v8, 17
-; RV64-NEXT:  .LBB17_144:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 16
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 17(sp)
-; RV64-NEXT:    bnez a2, .LBB17_146
-; RV64-NEXT:  # %bb.145:
-; RV64-NEXT:    vslidedown.vi v16, v28, 16
-; RV64-NEXT:    j .LBB17_147
-; RV64-NEXT:  .LBB17_146:
-; RV64-NEXT:    vslidedown.vi v16, v8, 16
-; RV64-NEXT:  .LBB17_147:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 15
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 16(sp)
-; RV64-NEXT:    bnez a2, .LBB17_149
-; RV64-NEXT:  # %bb.148:
-; RV64-NEXT:    vslidedown.vi v16, v28, 15
-; RV64-NEXT:    j .LBB17_150
-; RV64-NEXT:  .LBB17_149:
-; RV64-NEXT:    vslidedown.vi v16, v8, 15
-; RV64-NEXT:  .LBB17_150:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 14
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 15(sp)
-; RV64-NEXT:    bnez a2, .LBB17_152
-; RV64-NEXT:  # %bb.151:
-; RV64-NEXT:    vslidedown.vi v16, v28, 14
-; RV64-NEXT:    j .LBB17_153
-; RV64-NEXT:  .LBB17_152:
-; RV64-NEXT:    vslidedown.vi v16, v8, 14
-; RV64-NEXT:  .LBB17_153:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 13
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 14(sp)
-; RV64-NEXT:    bnez a2, .LBB17_155
-; RV64-NEXT:  # %bb.154:
-; RV64-NEXT:    vslidedown.vi v16, v28, 13
-; RV64-NEXT:    j .LBB17_156
-; RV64-NEXT:  .LBB17_155:
-; RV64-NEXT:    vslidedown.vi v16, v8, 13
-; RV64-NEXT:  .LBB17_156:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 12
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 13(sp)
-; RV64-NEXT:    bnez a2, .LBB17_158
-; RV64-NEXT:  # %bb.157:
-; RV64-NEXT:    vslidedown.vi v16, v28, 12
-; RV64-NEXT:    j .LBB17_159
-; RV64-NEXT:  .LBB17_158:
-; RV64-NEXT:    vslidedown.vi v16, v8, 12
-; RV64-NEXT:  .LBB17_159:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 11
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 12(sp)
-; RV64-NEXT:    bnez a2, .LBB17_161
-; RV64-NEXT:  # %bb.160:
-; RV64-NEXT:    vslidedown.vi v16, v28, 11
-; RV64-NEXT:    j .LBB17_162
-; RV64-NEXT:  .LBB17_161:
-; RV64-NEXT:    vslidedown.vi v16, v8, 11
-; RV64-NEXT:  .LBB17_162:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 10
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 11(sp)
-; RV64-NEXT:    bnez a2, .LBB17_164
-; RV64-NEXT:  # %bb.163:
-; RV64-NEXT:    vslidedown.vi v16, v28, 10
-; RV64-NEXT:    j .LBB17_165
-; RV64-NEXT:  .LBB17_164:
-; RV64-NEXT:    vslidedown.vi v16, v8, 10
-; RV64-NEXT:  .LBB17_165:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 9
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 10(sp)
-; RV64-NEXT:    bnez a2, .LBB17_167
-; RV64-NEXT:  # %bb.166:
-; RV64-NEXT:    vslidedown.vi v16, v28, 9
-; RV64-NEXT:    j .LBB17_168
-; RV64-NEXT:  .LBB17_167:
-; RV64-NEXT:    vslidedown.vi v16, v8, 9
-; RV64-NEXT:  .LBB17_168:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 8
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 9(sp)
-; RV64-NEXT:    bnez a2, .LBB17_170
-; RV64-NEXT:  # %bb.169:
-; RV64-NEXT:    vslidedown.vi v16, v28, 8
-; RV64-NEXT:    j .LBB17_171
-; RV64-NEXT:  .LBB17_170:
-; RV64-NEXT:    vslidedown.vi v16, v8, 8
-; RV64-NEXT:  .LBB17_171:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 7
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 8(sp)
-; RV64-NEXT:    bnez a2, .LBB17_173
-; RV64-NEXT:  # %bb.172:
-; RV64-NEXT:    vslidedown.vi v16, v28, 7
-; RV64-NEXT:    j .LBB17_174
-; RV64-NEXT:  .LBB17_173:
-; RV64-NEXT:    vslidedown.vi v16, v8, 7
-; RV64-NEXT:  .LBB17_174:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 6
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 7(sp)
-; RV64-NEXT:    bnez a2, .LBB17_176
-; RV64-NEXT:  # %bb.175:
-; RV64-NEXT:    vslidedown.vi v16, v28, 6
-; RV64-NEXT:    j .LBB17_177
-; RV64-NEXT:  .LBB17_176:
-; RV64-NEXT:    vslidedown.vi v16, v8, 6
-; RV64-NEXT:  .LBB17_177:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 5
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 6(sp)
-; RV64-NEXT:    bnez a2, .LBB17_179
-; RV64-NEXT:  # %bb.178:
-; RV64-NEXT:    vslidedown.vi v16, v28, 5
-; RV64-NEXT:    j .LBB17_180
-; RV64-NEXT:  .LBB17_179:
-; RV64-NEXT:    vslidedown.vi v16, v8, 5
-; RV64-NEXT:  .LBB17_180:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 4
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 5(sp)
-; RV64-NEXT:    bnez a2, .LBB17_182
-; RV64-NEXT:  # %bb.181:
-; RV64-NEXT:    vslidedown.vi v16, v28, 4
-; RV64-NEXT:    j .LBB17_183
-; RV64-NEXT:  .LBB17_182:
-; RV64-NEXT:    vslidedown.vi v16, v8, 4
-; RV64-NEXT:  .LBB17_183:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 3
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 4(sp)
-; RV64-NEXT:    bnez a2, .LBB17_185
-; RV64-NEXT:  # %bb.184:
-; RV64-NEXT:    vslidedown.vi v16, v28, 3
-; RV64-NEXT:    j .LBB17_186
-; RV64-NEXT:  .LBB17_185:
-; RV64-NEXT:    vslidedown.vi v16, v8, 3
-; RV64-NEXT:  .LBB17_186:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v16, v12, 2
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 3(sp)
-; RV64-NEXT:    bnez a2, .LBB17_188
-; RV64-NEXT:  # %bb.187:
-; RV64-NEXT:    vslidedown.vi v16, v28, 2
-; RV64-NEXT:    j .LBB17_189
-; RV64-NEXT:  .LBB17_188:
-; RV64-NEXT:    vslidedown.vi v16, v8, 2
-; RV64-NEXT:  .LBB17_189:
-; RV64-NEXT:    vmv.x.s a1, v16
-; RV64-NEXT:    vslidedown.vi v12, v12, 1
-; RV64-NEXT:    vmv.x.s a2, v12
-; RV64-NEXT:    andi a2, a2, 1
-; RV64-NEXT:    sb a1, 2(sp)
-; RV64-NEXT:    bnez a2, .LBB17_191
-; RV64-NEXT:  # %bb.190:
-; RV64-NEXT:    vslidedown.vi v28, v28, 1
-; RV64-NEXT:    j .LBB17_192
-; RV64-NEXT:  .LBB17_191:
-; RV64-NEXT:    vslidedown.vi v28, v8, 1
-; RV64-NEXT:  .LBB17_192:
-; RV64-NEXT:    vmv.x.s a1, v28
-; RV64-NEXT:    sb a1, 1(sp)
-; RV64-NEXT:    vsetvli zero, a0, e8,m4,ta,mu
-; RV64-NEXT:    vle8.v v28, (sp)
-; RV64-NEXT:    vand.vi v28, v28, 1
-; RV64-NEXT:    vmsne.vi v0, v28, 0
-; RV64-NEXT:    addi sp, s0, -128
-; RV64-NEXT:    ld s0, 112(sp) # 8-byte Folded Reload
-; RV64-NEXT:    ld ra, 120(sp) # 8-byte Folded Reload
-; RV64-NEXT:    addi sp, sp, 128
-; RV64-NEXT:    ret
+; CHECK-LABEL: vselect_v64i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    addi a0, zero, 64
+; CHECK-NEXT:    vsetvli zero, a0, e8,m4,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
   %v = select <64 x i1> %cc, <64 x i1> %a, <64 x i1> %b
   ret <64 x i1> %v
 }

diff --git a/llvm/test/CodeGen/RISCV/rvv/vselect-mask.ll b/llvm/test/CodeGen/RISCV/rvv/vselect-mask.ll
new file mode 100644
index 0000000000000..0b15382c1e2b0
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/rvv/vselect-mask.ll
@@ -0,0 +1,87 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc -mtriple=riscv32 -mattr=+experimental-v -verify-machineinstrs < %s | FileCheck %s
+; RUN: llc -mtriple=riscv64 -mattr=+experimental-v -verify-machineinstrs < %s | FileCheck %s
+
+define <vscale x 1 x i1> @vselect_nxv1i1(<vscale x 1 x i1> %a, <vscale x 1 x i1> %b, <vscale x 1 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv1i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,mf8,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 1 x i1> %cc, <vscale x 1 x i1> %a, <vscale x 1 x i1> %b
+  ret <vscale x 1 x i1> %v
+}
+
+define <vscale x 2 x i1> @vselect_nxv2i1(<vscale x 2 x i1> %a, <vscale x 2 x i1> %b, <vscale x 2 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv2i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,mf4,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 2 x i1> %cc, <vscale x 2 x i1> %a, <vscale x 2 x i1> %b
+  ret <vscale x 2 x i1> %v
+}
+
+define <vscale x 4 x i1> @vselect_nxv4i1(<vscale x 4 x i1> %a, <vscale x 4 x i1> %b, <vscale x 4 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv4i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,mf2,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 4 x i1> %cc, <vscale x 4 x i1> %a, <vscale x 4 x i1> %b
+  ret <vscale x 4 x i1> %v
+}
+
+define <vscale x 8 x i1> @vselect_nxv8i1(<vscale x 8 x i1> %a, <vscale x 8 x i1> %b, <vscale x 8 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv8i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,m1,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 8 x i1> %cc, <vscale x 8 x i1> %a, <vscale x 8 x i1> %b
+  ret <vscale x 8 x i1> %v
+}
+
+define <vscale x 16 x i1> @vselect_nxv16i1(<vscale x 16 x i1> %a, <vscale x 16 x i1> %b, <vscale x 16 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv16i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,m2,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 16 x i1> %cc, <vscale x 16 x i1> %a, <vscale x 16 x i1> %b
+  ret <vscale x 16 x i1> %v
+}
+
+define <vscale x 32 x i1> @vselect_nxv32i1(<vscale x 32 x i1> %a, <vscale x 32 x i1> %b, <vscale x 32 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv32i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,m4,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 32 x i1> %cc, <vscale x 32 x i1> %a, <vscale x 32 x i1> %b
+  ret <vscale x 32 x i1> %v
+}
+
+define <vscale x 64 x i1> @vselect_nxv64i1(<vscale x 64 x i1> %a, <vscale x 64 x i1> %b, <vscale x 64 x i1> %cc) {
+; CHECK-LABEL: vselect_nxv64i1:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetvli a0, zero, e8,m8,ta,mu
+; CHECK-NEXT:    vmandnot.mm v25, v8, v9
+; CHECK-NEXT:    vmand.mm v26, v0, v9
+; CHECK-NEXT:    vmor.mm v0, v26, v25
+; CHECK-NEXT:    ret
+  %v = select <vscale x 64 x i1> %cc, <vscale x 64 x i1> %a, <vscale x 64 x i1> %b
+  ret <vscale x 64 x i1> %v
+}
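
Where the removed RV32/RV64 sequences above extracted all 64 lanes one at a
time (a vslidedown, branch and sb through the stack per element), the new
lowering is three whole-mask operations, with vmandnot.mm folding the NOT
into the AND. The underlying identity is select(cc, a, b) == (cc & a) |
(~cc & b). A minimal sketch of that expansion in straight-line LLVM IR,
using a hypothetical fixed <4 x i1> type purely for illustration rather
than code from this commit:

; For i1 elements, the 0/1 and 0/-1 boolean encodings are the same bit
; pattern, so the all-ones mask needed by the AND/XOR/OR expansion comes
; for free.
define <4 x i1> @select_as_logic(<4 x i1> %cc, <4 x i1> %a, <4 x i1> %b) {
  %t = and <4 x i1> %cc, %a                                    ; cc & a
  %n = xor <4 x i1> %cc, <i1 true, i1 true, i1 true, i1 true>  ; ~cc
  %f = and <4 x i1> %n, %b                                     ; ~cc & b
  %r = or <4 x i1> %t, %f
  ret <4 x i1> %r
}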


        

