[llvm] [RISCV] Support llvm.masked.expandload intrinsic (PR #101954)

Pengcheng Wang via llvm-commits llvm-commits at lists.llvm.org
Wed Oct 30 01:41:48 PDT 2024


https://github.com/wangpc-pp updated https://github.com/llvm/llvm-project/pull/101954

>From 9e4b22db42bc288edb086749d44b5497109c9b39 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Mon, 5 Aug 2024 17:30:35 +0800
Subject: [PATCH 01/15] [RISCV] Support llvm.masked.expandload intrinsic

We can use `viota.m` + indexed load to synthesize expanding load:
```
%res = llvm.masked.expandload(%ptr, %mask, %passthru)
->
%index = viota %mask
if elt_size > 8:
  %index = vsll.vi %index, log2(elt_size), %mask
%res = vluxei<n> %passthru, %ptr, %index, %mask
```

And if `%mask` is all ones, we can lower expanding load to a normal
unmasked load.

Fixes #101914
---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp   |   30 +-
 .../Target/RISCV/RISCVTargetTransformInfo.cpp |   10 +
 .../Target/RISCV/RISCVTargetTransformInfo.h   |    2 +
 llvm/test/CodeGen/RISCV/rvv/expandload.ll     | 1549 +++++++++++++++++
 .../RISCV/rvv/fixed-vectors-expandload-fp.ll  | 1046 ++---------
 .../RISCV/rvv/fixed-vectors-expandload-int.ll |  897 +---------
 6 files changed, 1763 insertions(+), 1771 deletions(-)
 create mode 100644 llvm/test/CodeGen/RISCV/rvv/expandload.ll

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 3588ef46cadce1..31aecc205a76f1 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11107,6 +11107,7 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
   SDValue BasePtr = MemSD->getBasePtr();
 
   SDValue Mask, PassThru, VL;
+  bool IsExpandingLoad = false;
   if (const auto *VPLoad = dyn_cast<VPLoadSDNode>(Op)) {
     Mask = VPLoad->getMask();
     PassThru = DAG.getUNDEF(VT);
@@ -11115,6 +11116,7 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
     const auto *MLoad = cast<MaskedLoadSDNode>(Op);
     Mask = MLoad->getMask();
     PassThru = MLoad->getPassThru();
+    IsExpandingLoad = MLoad->isExpandingLoad();
   }
 
   bool IsUnmasked = ISD::isConstantSplatVectorAllOnes(Mask.getNode());
@@ -11134,16 +11136,38 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
   if (!VL)
     VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second;
 
-  unsigned IntID =
-      IsUnmasked ? Intrinsic::riscv_vle : Intrinsic::riscv_vle_mask;
+  SDValue Index;
+  if (!IsUnmasked && IsExpandingLoad) {
+    MVT IndexVT = ContainerVT;
+    if (ContainerVT.isFloatingPoint())
+      IndexVT = IndexVT.changeVectorElementTypeToInteger();
+
+    if (Subtarget.isRV32() && IndexVT.getVectorElementType().bitsGT(XLenVT))
+      IndexVT = IndexVT.changeVectorElementType(XLenVT);
+
+    Index = DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
+                        DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
+                        DAG.getUNDEF(IndexVT), Mask, VL);
+    if (uint64_t EltSize = ContainerVT.getScalarSizeInBits(); EltSize > 8)
+      Index = DAG.getNode(RISCVISD::SHL_VL, DL, IndexVT, Index,
+                          DAG.getConstant(Log2_64(EltSize / 8), DL, IndexVT),
+                          DAG.getUNDEF(IndexVT), Mask, VL);
+  }
+
+  unsigned IntID = IsUnmasked        ? Intrinsic::riscv_vle
+                   : IsExpandingLoad ? Intrinsic::riscv_vluxei_mask
+                                     : Intrinsic::riscv_vle_mask;
   SmallVector<SDValue, 8> Ops{Chain, DAG.getTargetConstant(IntID, DL, XLenVT)};
   if (IsUnmasked)
     Ops.push_back(DAG.getUNDEF(ContainerVT));
   else
     Ops.push_back(PassThru);
   Ops.push_back(BasePtr);
-  if (!IsUnmasked)
+  if (!IsUnmasked) {
+    if (IsExpandingLoad)
+      Ops.push_back(Index);
     Ops.push_back(Mask);
+  }
   Ops.push_back(VL);
   if (!IsUnmasked)
     Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
diff --git a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
index 395baa5f1aab99..d1d54990ab15ce 100644
--- a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
+++ b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
@@ -2286,6 +2286,16 @@ bool RISCVTTIImpl::isLSRCostLess(const TargetTransformInfo::LSRCost &C1,
                   C2.ScaleCost, C2.ImmCost, C2.SetupCost);
 }
 
+bool RISCVTTIImpl::isLegalMaskedExpandLoad(Type *DataTy, Align Alignment) {
+  auto *VTy = dyn_cast<VectorType>(DataTy);
+  if (!VTy || VTy->isScalableTy())
+    return false;
+
+  if (!isLegalMaskedLoadStore(DataTy, Alignment))
+    return false;
+  return true;
+}
+
 bool RISCVTTIImpl::isLegalMaskedCompressStore(Type *DataTy, Align Alignment) {
   auto *VTy = dyn_cast<VectorType>(DataTy);
   if (!VTy || VTy->isScalableTy())
diff --git a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.h b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.h
index 13d28e4db49cd9..29a6c68a6c585a 100644
--- a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.h
+++ b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.h
@@ -301,6 +301,8 @@ class RISCVTTIImpl : public BasicTTIImplBase<RISCVTTIImpl> {
                                              DL);
   }
 
+  bool isLegalMaskedExpandLoad(Type *DataType, Align Alignment);
+
   bool isLegalMaskedCompressStore(Type *DataTy, Align Alignment);
 
   bool isVScaleKnownToBeAPowerOfTwo() const {
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
new file mode 100644
index 00000000000000..ae51beb0272853
--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -0,0 +1,1549 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefix=RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefix=RV32
+
+; Load + expand for i8 type
+
+define <1 x i8> @test_expandload_v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v1i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> %mask, <1 x i8> %passthru)
+  ret <1 x i8> %res
+}
+
+define <1 x i8> @test_expandload_v1i8_all_ones(ptr %base, <1 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v1i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> splat (i1 true), <1 x i8> %passthru)
+  ret <1 x i8> %res
+}
+
+define <2 x i8> @test_expandload_v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v2i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> %mask, <2 x i8> %passthru)
+  ret <2 x i8> %res
+}
+
+define <2 x i8> @test_expandload_v2i8_all_ones(ptr %base, <2 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v2i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> splat (i1 true), <2 x i8> %passthru)
+  ret <2 x i8> %res
+}
+
+define <4 x i8> @test_expandload_v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v4i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> %mask, <4 x i8> %passthru)
+  ret <4 x i8> %res
+}
+
+define <4 x i8> @test_expandload_v4i8_all_ones(ptr %base, <4 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v4i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> splat (i1 true), <4 x i8> %passthru)
+  ret <4 x i8> %res
+}
+
+define <8 x i8> @test_expandload_v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v8i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> %mask, <8 x i8> %passthru)
+  ret <8 x i8> %res
+}
+
+define <8 x i8> @test_expandload_v8i8_all_ones(ptr %base, <8 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v8i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> splat (i1 true), <8 x i8> %passthru)
+  ret <8 x i8> %res
+}
+
+define <16 x i8> @test_expandload_v16i8(ptr %base, <16 x i1> %mask, <16 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v16i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> %mask, <16 x i8> %passthru)
+  ret <16 x i8> %res
+}
+
+define <16 x i8> @test_expandload_v16i8_all_ones(ptr %base, <16 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v16i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> splat (i1 true), <16 x i8> %passthru)
+  ret <16 x i8> %res
+}
+
+define <32 x i8> @test_expandload_v32i8(ptr %base, <32 x i1> %mask, <32 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v32i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v10, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v10, v0.t
+; RV32-NEXT:    ret
+  %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> %mask, <32 x i8> %passthru)
+  ret <32 x i8> %res
+}
+
+define <32 x i8> @test_expandload_v32i8_all_ones(ptr %base, <32 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v32i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> splat (i1 true), <32 x i8> %passthru)
+  ret <32 x i8> %res
+}
+
+define <64 x i8> @test_expandload_v64i8(ptr %base, <64 x i1> %mask, <64 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v64i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v12, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v12, v0.t
+; RV32-NEXT:    ret
+  %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> %mask, <64 x i8> %passthru)
+  ret <64 x i8> %res
+}
+
+define <64 x i8> @test_expandload_v64i8_all_ones(ptr %base, <64 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v64i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> splat (i1 true), <64 x i8> %passthru)
+  ret <64 x i8> %res
+}
+
+define <128 x i8> @test_expandload_v128i8(ptr %base, <128 x i1> %mask, <128 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v128i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 128
+; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vluxei8.v v8, (a0), v16, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v128i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 128
+; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vluxei8.v v8, (a0), v16, v0.t
+; RV32-NEXT:    ret
+  %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> %mask, <128 x i8> %passthru)
+  ret <128 x i8> %res
+}
+
+define <128 x i8> @test_expandload_v128i8_all_ones(ptr %base, <128 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v128i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 128
+; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v128i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 128
+; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> splat (i1 true), <128 x i8> %passthru)
+  ret <128 x i8> %res
+}
+
+define <256 x i8> @test_expandload_v256i8(ptr %base, <256 x i1> %mask, <256 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v256i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    addi sp, sp, -16
+; RV64-NEXT:    .cfi_def_cfa_offset 16
+; RV64-NEXT:    csrr a2, vlenb
+; RV64-NEXT:    slli a2, a2, 4
+; RV64-NEXT:    sub sp, sp, a2
+; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV64-NEXT:    addi a2, sp, 16
+; RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; RV64-NEXT:    vmv1r.v v9, v0
+; RV64-NEXT:    li a2, 128
+; RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; RV64-NEXT:    vle8.v v16, (a1)
+; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV64-NEXT:    vslidedown.vi v10, v0, 1
+; RV64-NEXT:    vmv.x.s a1, v10
+; RV64-NEXT:    vmv.x.s a3, v0
+; RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; RV64-NEXT:    viota.m v24, v8
+; RV64-NEXT:    csrr a2, vlenb
+; RV64-NEXT:    slli a2, a2, 3
+; RV64-NEXT:    add a2, sp, a2
+; RV64-NEXT:    addi a2, a2, 16
+; RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; RV64-NEXT:    cpop a2, a3
+; RV64-NEXT:    cpop a1, a1
+; RV64-NEXT:    add a2, a0, a2
+; RV64-NEXT:    add a1, a2, a1
+; RV64-NEXT:    vmv1r.v v0, v8
+; RV64-NEXT:    csrr a2, vlenb
+; RV64-NEXT:    slli a2, a2, 3
+; RV64-NEXT:    add a2, sp, a2
+; RV64-NEXT:    addi a2, a2, 16
+; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; RV64-NEXT:    viota.m v24, v9
+; RV64-NEXT:    vmv1r.v v0, v9
+; RV64-NEXT:    addi a1, sp, 16
+; RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; RV64-NEXT:    csrr a0, vlenb
+; RV64-NEXT:    slli a0, a0, 4
+; RV64-NEXT:    add sp, sp, a0
+; RV64-NEXT:    addi sp, sp, 16
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v256i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    addi sp, sp, -16
+; RV32-NEXT:    .cfi_def_cfa_offset 16
+; RV32-NEXT:    csrr a2, vlenb
+; RV32-NEXT:    slli a2, a2, 4
+; RV32-NEXT:    sub sp, sp, a2
+; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV32-NEXT:    addi a2, sp, 16
+; RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; RV32-NEXT:    vmv1r.v v9, v0
+; RV32-NEXT:    li a2, 128
+; RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; RV32-NEXT:    vle8.v v16, (a1)
+; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV32-NEXT:    vslidedown.vi v10, v0, 1
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsrl.vx v11, v10, a1
+; RV32-NEXT:    vmv.x.s a3, v11
+; RV32-NEXT:    vsrl.vx v11, v0, a1
+; RV32-NEXT:    vmv.x.s a1, v11
+; RV32-NEXT:    vmv.x.s a4, v10
+; RV32-NEXT:    vmv.x.s a5, v0
+; RV32-NEXT:    cpop a1, a1
+; RV32-NEXT:    cpop a5, a5
+; RV32-NEXT:    add a1, a5, a1
+; RV32-NEXT:    cpop a3, a3
+; RV32-NEXT:    cpop a4, a4
+; RV32-NEXT:    add a3, a4, a3
+; RV32-NEXT:    add a1, a1, a3
+; RV32-NEXT:    add a1, a0, a1
+; RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; RV32-NEXT:    viota.m v24, v8
+; RV32-NEXT:    csrr a2, vlenb
+; RV32-NEXT:    slli a2, a2, 3
+; RV32-NEXT:    add a2, sp, a2
+; RV32-NEXT:    addi a2, a2, 16
+; RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; RV32-NEXT:    vmv1r.v v0, v8
+; RV32-NEXT:    csrr a2, vlenb
+; RV32-NEXT:    slli a2, a2, 3
+; RV32-NEXT:    add a2, sp, a2
+; RV32-NEXT:    addi a2, a2, 16
+; RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; RV32-NEXT:    viota.m v24, v9
+; RV32-NEXT:    vmv1r.v v0, v9
+; RV32-NEXT:    addi a1, sp, 16
+; RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; RV32-NEXT:    csrr a0, vlenb
+; RV32-NEXT:    slli a0, a0, 4
+; RV32-NEXT:    add sp, sp, a0
+; RV32-NEXT:    addi sp, sp, 16
+; RV32-NEXT:    ret
+  %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> %mask, <256 x i8> %passthru)
+  ret <256 x i8> %res
+}
+
+define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passthru) {
+; RV64-LABEL: test_expandload_v256i8_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 128
+; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV64-NEXT:    vle8.v v8, (a0)
+; RV64-NEXT:    vmset.m v16
+; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; RV64-NEXT:    vmv.x.s a2, v16
+; RV64-NEXT:    cpop a2, a2
+; RV64-NEXT:    vslidedown.vi v16, v16, 1
+; RV64-NEXT:    vmv.x.s a3, v16
+; RV64-NEXT:    cpop a3, a3
+; RV64-NEXT:    add a0, a0, a2
+; RV64-NEXT:    add a0, a0, a3
+; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV64-NEXT:    vle8.v v16, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v256i8_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 128
+; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV32-NEXT:    vmset.m v8
+; RV32-NEXT:    li a2, 32
+; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV32-NEXT:    vsrl.vx v9, v8, a2
+; RV32-NEXT:    vmv.x.s a3, v9
+; RV32-NEXT:    cpop a3, a3
+; RV32-NEXT:    vmv.x.s a4, v8
+; RV32-NEXT:    cpop a4, a4
+; RV32-NEXT:    add a3, a4, a3
+; RV32-NEXT:    vslidedown.vi v8, v8, 1
+; RV32-NEXT:    vsrl.vx v9, v8, a2
+; RV32-NEXT:    vmv.x.s a2, v9
+; RV32-NEXT:    cpop a2, a2
+; RV32-NEXT:    vmv.x.s a4, v8
+; RV32-NEXT:    cpop a4, a4
+; RV32-NEXT:    add a2, a4, a2
+; RV32-NEXT:    add a3, a0, a3
+; RV32-NEXT:    add a2, a3, a2
+; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; RV32-NEXT:    vle8.v v16, (a2)
+; RV32-NEXT:    vle8.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> splat (i1 true), <256 x i8> %passthru)
+  ret <256 x i8> %res
+}
+
+declare <1 x i8> @llvm.masked.expandload.v1i8(ptr, <1 x i1>, <1 x i8>)
+declare <2 x i8> @llvm.masked.expandload.v2i8(ptr, <2 x i1>, <2 x i8>)
+declare <4 x i8> @llvm.masked.expandload.v4i8(ptr, <4 x i1>, <4 x i8>)
+declare <8 x i8> @llvm.masked.expandload.v8i8(ptr, <8 x i1>, <8 x i8>)
+declare <16 x i8> @llvm.masked.expandload.v16i8(ptr, <16 x i1>, <16 x i8>)
+declare <32 x i8> @llvm.masked.expandload.v32i8(ptr, <32 x i1>, <32 x i8>)
+declare <64 x i8> @llvm.masked.expandload.v64i8(ptr, <64 x i1>, <64 x i8>)
+declare <128 x i8> @llvm.masked.expandload.v128i8(ptr, <128 x i1>, <128 x i8>)
+declare <256 x i8> @llvm.masked.expandload.v256i8(ptr, <256 x i1>, <256 x i8>)
+
+; Load + expand for i16 type
+
+define <1 x i16> @test_expandload_v1i16(ptr %base, <1 x i1> %mask, <1 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v1i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %passthru)
+  ret <1 x i16> %res
+}
+
+define <1 x i16> @test_expandload_v1i16_all_ones(ptr %base, <1 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v1i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> splat (i1 true), <1 x i16> %passthru)
+  ret <1 x i16> %res
+}
+
+define <2 x i16> @test_expandload_v2i16(ptr %base, <2 x i1> %mask, <2 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v2i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %passthru)
+  ret <2 x i16> %res
+}
+
+define <2 x i16> @test_expandload_v2i16_all_ones(ptr %base, <2 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v2i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> splat (i1 true), <2 x i16> %passthru)
+  ret <2 x i16> %res
+}
+
+define <4 x i16> @test_expandload_v4i16(ptr %base, <4 x i1> %mask, <4 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v4i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %passthru)
+  ret <4 x i16> %res
+}
+
+define <4 x i16> @test_expandload_v4i16_all_ones(ptr %base, <4 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v4i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> splat (i1 true), <4 x i16> %passthru)
+  ret <4 x i16> %res
+}
+
+define <8 x i16> @test_expandload_v8i16(ptr %base, <8 x i1> %mask, <8 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v8i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %passthru)
+  ret <8 x i16> %res
+}
+
+define <8 x i16> @test_expandload_v8i16_all_ones(ptr %base, <8 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v8i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> splat (i1 true), <8 x i16> %passthru)
+  ret <8 x i16> %res
+}
+
+define <16 x i16> @test_expandload_v16i16(ptr %base, <16 x i1> %mask, <16 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v16i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v10, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v10, v0.t
+; RV32-NEXT:    ret
+  %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> %mask, <16 x i16> %passthru)
+  ret <16 x i16> %res
+}
+
+define <16 x i16> @test_expandload_v16i16_all_ones(ptr %base, <16 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v16i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> splat (i1 true), <16 x i16> %passthru)
+  ret <16 x i16> %res
+}
+
+define <32 x i16> @test_expandload_v32i16(ptr %base, <32 x i1> %mask, <32 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v32i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vsll.vi v12, v12, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v12, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vsll.vi v12, v12, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v12, v0.t
+; RV32-NEXT:    ret
+  %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> %mask, <32 x i16> %passthru)
+  ret <32 x i16> %res
+}
+
+define <32 x i16> @test_expandload_v32i16_all_ones(ptr %base, <32 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v32i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> splat (i1 true), <32 x i16> %passthru)
+  ret <32 x i16> %res
+}
+
+define <64 x i16> @test_expandload_v64i16(ptr %base, <64 x i1> %mask, <64 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v64i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vsll.vi v16, v16, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; RV32-NEXT:    ret
+  %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> %mask, <64 x i16> %passthru)
+  ret <64 x i16> %res
+}
+
+define <64 x i16> @test_expandload_v64i16_all_ones(ptr %base, <64 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v64i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> splat (i1 true), <64 x i16> %passthru)
+  ret <64 x i16> %res
+}
+
+define <128 x i16> @test_expandload_v128i16(ptr %base, <128 x i1> %mask, <128 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v128i16:
+; RV64:       # %bb.0:
+; RV64-NEXT:    addi sp, sp, -16
+; RV64-NEXT:    .cfi_def_cfa_offset 16
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 4
+; RV64-NEXT:    sub sp, sp, a1
+; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 3
+; RV64-NEXT:    add a1, sp, a1
+; RV64-NEXT:    addi a1, a1, 16
+; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV64-NEXT:    vmv1r.v v7, v0
+; RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; RV64-NEXT:    vslidedown.vi v0, v0, 8
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
+; RV64-NEXT:    addi a2, sp, 16
+; RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; RV64-NEXT:    vmv.x.s a2, v7
+; RV64-NEXT:    cpop a2, a2
+; RV64-NEXT:    slli a2, a2, 1
+; RV64-NEXT:    add a2, a0, a2
+; RV64-NEXT:    csrr a3, vlenb
+; RV64-NEXT:    slli a3, a3, 3
+; RV64-NEXT:    add a3, sp, a3
+; RV64-NEXT:    addi a3, a3, 16
+; RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
+; RV64-NEXT:    addi a3, sp, 16
+; RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
+; RV64-NEXT:    viota.m v24, v7
+; RV64-NEXT:    vmv1r.v v0, v7
+; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; RV64-NEXT:    csrr a0, vlenb
+; RV64-NEXT:    slli a0, a0, 4
+; RV64-NEXT:    add sp, sp, a0
+; RV64-NEXT:    addi sp, sp, 16
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v128i16:
+; RV32:       # %bb.0:
+; RV32-NEXT:    addi sp, sp, -16
+; RV32-NEXT:    .cfi_def_cfa_offset 16
+; RV32-NEXT:    csrr a1, vlenb
+; RV32-NEXT:    slli a1, a1, 4
+; RV32-NEXT:    sub sp, sp, a1
+; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV32-NEXT:    csrr a1, vlenb
+; RV32-NEXT:    slli a1, a1, 3
+; RV32-NEXT:    add a1, sp, a1
+; RV32-NEXT:    addi a1, a1, 16
+; RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; RV32-NEXT:    vmv1r.v v24, v0
+; RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; RV32-NEXT:    vslidedown.vi v0, v0, 8
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    viota.m v8, v0
+; RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; RV32-NEXT:    addi a2, sp, 16
+; RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
+; RV32-NEXT:    li a2, 32
+; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV32-NEXT:    vsrl.vx v8, v24, a2
+; RV32-NEXT:    vmv.x.s a2, v8
+; RV32-NEXT:    cpop a2, a2
+; RV32-NEXT:    vmv.x.s a3, v24
+; RV32-NEXT:    cpop a3, a3
+; RV32-NEXT:    add a2, a3, a2
+; RV32-NEXT:    slli a2, a2, 1
+; RV32-NEXT:    add a2, a0, a2
+; RV32-NEXT:    addi a3, sp, 16
+; RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
+; RV32-NEXT:    viota.m v8, v24
+; RV32-NEXT:    vmv1r.v v0, v24
+; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; RV32-NEXT:    addi a1, sp, 16
+; RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; RV32-NEXT:    csrr a1, vlenb
+; RV32-NEXT:    slli a1, a1, 3
+; RV32-NEXT:    add a1, sp, a1
+; RV32-NEXT:    addi a1, a1, 16
+; RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; RV32-NEXT:    addi a1, sp, 16
+; RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
+; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; RV32-NEXT:    csrr a0, vlenb
+; RV32-NEXT:    slli a0, a0, 4
+; RV32-NEXT:    add sp, sp, a0
+; RV32-NEXT:    addi sp, sp, 16
+; RV32-NEXT:    ret
+  %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> %mask, <128 x i16> %passthru)
+  ret <128 x i16> %res
+}
+
+define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %passthru) {
+; RV64-LABEL: test_expandload_v128i16_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 64
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    vle16.v v8, (a0)
+; RV64-NEXT:    vmset.m v16
+; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; RV64-NEXT:    vmv.x.s a2, v16
+; RV64-NEXT:    cpop a2, a2
+; RV64-NEXT:    slli a2, a2, 1
+; RV64-NEXT:    add a0, a0, a2
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    vle16.v v16, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v128i16_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 64
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    vle16.v v8, (a0)
+; RV32-NEXT:    vmset.m v16
+; RV32-NEXT:    li a2, 32
+; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV32-NEXT:    vsrl.vx v17, v16, a2
+; RV32-NEXT:    vmv.x.s a2, v17
+; RV32-NEXT:    cpop a2, a2
+; RV32-NEXT:    vmv.x.s a3, v16
+; RV32-NEXT:    cpop a3, a3
+; RV32-NEXT:    add a2, a3, a2
+; RV32-NEXT:    slli a2, a2, 1
+; RV32-NEXT:    add a0, a0, a2
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    vle16.v v16, (a0)
+; RV32-NEXT:    ret
+  %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> splat (i1 true), <128 x i16> %passthru)
+  ret <128 x i16> %res
+}
+
+declare <1 x i16> @llvm.masked.expandload.v1i16(ptr, <1 x i1>, <1 x i16>)
+declare <2 x i16> @llvm.masked.expandload.v2i16(ptr, <2 x i1>, <2 x i16>)
+declare <4 x i16> @llvm.masked.expandload.v4i16(ptr, <4 x i1>, <4 x i16>)
+declare <8 x i16> @llvm.masked.expandload.v8i16(ptr, <8 x i1>, <8 x i16>)
+declare <16 x i16> @llvm.masked.expandload.v16i16(ptr, <16 x i1>, <16 x i16>)
+declare <32 x i16> @llvm.masked.expandload.v32i16(ptr, <32 x i1>, <32 x i16>)
+declare <64 x i16> @llvm.masked.expandload.v64i16(ptr, <64 x i1>, <64 x i16>)
+declare <128 x i16> @llvm.masked.expandload.v128i16(ptr, <128 x i1>, <128 x i16>)
+
+; Expanding load for i32 type
+
+define <1 x i32> @test_expandload_v1i32(ptr %base, <1 x i1> %mask, <1 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v1i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %passthru)
+  ret <1 x i32> %res
+}
+
+define <1 x i32> @test_expandload_v1i32_all_ones(ptr %base, <1 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v1i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> splat (i1 true), <1 x i32> %passthru)
+  ret <1 x i32> %res
+}
+
+define <2 x i32> @test_expandload_v2i32(ptr %base, <2 x i1> %mask, <2 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v2i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %passthru)
+  ret <2 x i32> %res
+}
+
+define <2 x i32> @test_expandload_v2i32_all_ones(ptr %base, <2 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v2i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> splat (i1 true), <2 x i32> %passthru)
+  ret <2 x i32> %res
+}
+
+define <4 x i32> @test_expandload_v4i32(ptr %base, <4 x i1> %mask, <4 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v4i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %passthru)
+  ret <4 x i32> %res
+}
+
+define <4 x i32> @test_expandload_v4i32_all_ones(ptr %base, <4 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v4i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> splat (i1 true), <4 x i32> %passthru)
+  ret <4 x i32> %res
+}
+
+define <8 x i32> @test_expandload_v8i32(ptr %base, <8 x i1> %mask, <8 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v8i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; RV32-NEXT:    ret
+  %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %passthru)
+  ret <8 x i32> %res
+}
+
+define <8 x i32> @test_expandload_v8i32_all_ones(ptr %base, <8 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v8i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> splat (i1 true), <8 x i32> %passthru)
+  ret <8 x i32> %res
+}
+
+define <16 x i32> @test_expandload_v16i32(ptr %base, <16 x i1> %mask, <16 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v16i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vsll.vi v12, v12, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vsll.vi v12, v12, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; RV32-NEXT:    ret
+  %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> %mask, <16 x i32> %passthru)
+  ret <16 x i32> %res
+}
+
+define <16 x i32> @test_expandload_v16i32_all_ones(ptr %base, <16 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v16i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> splat (i1 true), <16 x i32> %passthru)
+  ret <16 x i32> %res
+}
+
+define <32 x i32> @test_expandload_v32i32(ptr %base, <32 x i1> %mask, <32 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v32i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; RV32-NEXT:    ret
+  %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> %mask, <32 x i32> %passthru)
+  ret <32 x i32> %res
+}
+
+define <32 x i32> @test_expandload_v32i32_all_ones(ptr %base, <32 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v32i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> splat (i1 true), <32 x i32> %passthru)
+  ret <32 x i32> %res
+}
+
+define <64 x i32> @test_expandload_v64i32(ptr %base, <64 x i1> %mask, <64 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v64i32:
+; RV64:       # %bb.0:
+; RV64-NEXT:    addi sp, sp, -16
+; RV64-NEXT:    .cfi_def_cfa_offset 16
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 4
+; RV64-NEXT:    sub sp, sp, a1
+; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 3
+; RV64-NEXT:    add a1, sp, a1
+; RV64-NEXT:    addi a1, a1, 16
+; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV64-NEXT:    vmv1r.v v7, v0
+; RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; RV64-NEXT:    vslidedown.vi v0, v0, 4
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
+; RV64-NEXT:    addi a1, sp, 16
+; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV64-NEXT:    vmv.x.s a1, v7
+; RV64-NEXT:    cpopw a1, a1
+; RV64-NEXT:    slli a1, a1, 2
+; RV64-NEXT:    add a1, a0, a1
+; RV64-NEXT:    csrr a2, vlenb
+; RV64-NEXT:    slli a2, a2, 3
+; RV64-NEXT:    add a2, sp, a2
+; RV64-NEXT:    addi a2, a2, 16
+; RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; RV64-NEXT:    addi a2, sp, 16
+; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; RV64-NEXT:    viota.m v24, v7
+; RV64-NEXT:    vmv1r.v v0, v7
+; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; RV64-NEXT:    csrr a0, vlenb
+; RV64-NEXT:    slli a0, a0, 4
+; RV64-NEXT:    add sp, sp, a0
+; RV64-NEXT:    addi sp, sp, 16
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i32:
+; RV32:       # %bb.0:
+; RV32-NEXT:    addi sp, sp, -16
+; RV32-NEXT:    .cfi_def_cfa_offset 16
+; RV32-NEXT:    csrr a1, vlenb
+; RV32-NEXT:    slli a1, a1, 4
+; RV32-NEXT:    sub sp, sp, a1
+; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV32-NEXT:    csrr a1, vlenb
+; RV32-NEXT:    slli a1, a1, 3
+; RV32-NEXT:    add a1, sp, a1
+; RV32-NEXT:    addi a1, a1, 16
+; RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV32-NEXT:    vmv1r.v v7, v0
+; RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; RV32-NEXT:    vslidedown.vi v0, v0, 4
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
+; RV32-NEXT:    addi a1, sp, 16
+; RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV32-NEXT:    vmv.x.s a1, v7
+; RV32-NEXT:    cpop a1, a1
+; RV32-NEXT:    slli a1, a1, 2
+; RV32-NEXT:    add a1, a0, a1
+; RV32-NEXT:    csrr a2, vlenb
+; RV32-NEXT:    slli a2, a2, 3
+; RV32-NEXT:    add a2, sp, a2
+; RV32-NEXT:    addi a2, a2, 16
+; RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; RV32-NEXT:    addi a2, sp, 16
+; RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; RV32-NEXT:    viota.m v24, v7
+; RV32-NEXT:    vmv1r.v v0, v7
+; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; RV32-NEXT:    csrr a0, vlenb
+; RV32-NEXT:    slli a0, a0, 4
+; RV32-NEXT:    add sp, sp, a0
+; RV32-NEXT:    addi sp, sp, 16
+; RV32-NEXT:    ret
+  %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> %mask, <64 x i32> %passthru)
+  ret <64 x i32> %res
+}
+
+define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passthru) {
+; RV64-LABEL: test_expandload_v64i32_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 32
+; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV64-NEXT:    vle32.v v8, (a0)
+; RV64-NEXT:    addi a0, a0, 128
+; RV64-NEXT:    vle32.v v16, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v64i32_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 32
+; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; RV32-NEXT:    vle32.v v8, (a0)
+; RV32-NEXT:    vmset.m v16
+; RV32-NEXT:    vmv.x.s a1, v16
+; RV32-NEXT:    cpop a1, a1
+; RV32-NEXT:    slli a1, a1, 2
+; RV32-NEXT:    add a0, a0, a1
+; RV32-NEXT:    vle32.v v16, (a0)
+; RV32-NEXT:    ret
+  %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> splat (i1 true), <64 x i32> %passthru)
+  ret <64 x i32> %res
+}
+
+declare <1 x i32> @llvm.masked.expandload.v1i32(ptr, <1 x i1>, <1 x i32>)
+declare <2 x i32> @llvm.masked.expandload.v2i32(ptr, <2 x i1>, <2 x i32>)
+declare <4 x i32> @llvm.masked.expandload.v4i32(ptr, <4 x i1>, <4 x i32>)
+declare <8 x i32> @llvm.masked.expandload.v8i32(ptr, <8 x i1>, <8 x i32>)
+declare <16 x i32> @llvm.masked.expandload.v16i32(ptr, <16 x i1>, <16 x i32>)
+declare <32 x i32> @llvm.masked.expandload.v32i32(ptr, <32 x i1>, <32 x i32>)
+declare <64 x i32> @llvm.masked.expandload.v64i32(ptr, <64 x i1>, <64 x i32>)
+
+; Expanding load for i64 type
+
+define <1 x i64> @test_expandload_v1i64(ptr %base, <1 x i1> %mask, <1 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v1i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %passthru)
+  ret <1 x i64> %res
+}
+
+define <1 x i64> @test_expandload_v1i64_all_ones(ptr %base, <1 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v1i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v1i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> splat (i1 true), <1 x i64> %passthru)
+  ret <1 x i64> %res
+}
+
+define <2 x i64> @test_expandload_v2i64(ptr %base, <2 x i1> %mask, <2 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v2i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; RV32-NEXT:    ret
+  %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %passthru)
+  ret <2 x i64> %res
+}
+
+define <2 x i64> @test_expandload_v2i64_all_ones(ptr %base, <2 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v2i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v2i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> splat (i1 true), <2 x i64> %passthru)
+  ret <2 x i64> %res
+}
+
+define <4 x i64> @test_expandload_v4i64(ptr %base, <4 x i1> %mask, <4 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v4i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; RV32-NEXT:    ret
+  %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %passthru)
+  ret <4 x i64> %res
+}
+
+define <4 x i64> @test_expandload_v4i64_all_ones(ptr %base, <4 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v4i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v4i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> splat (i1 true), <4 x i64> %passthru)
+  ret <4 x i64> %res
+}
+
+define <8 x i64> @test_expandload_v8i64(ptr %base, <8 x i1> %mask, <8 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v8i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; RV32-NEXT:    ret
+  %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %passthru)
+  ret <8 x i64> %res
+}
+
+define <8 x i64> @test_expandload_v8i64_all_ones(ptr %base, <8 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v8i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v8i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> splat (i1 true), <8 x i64> %passthru)
+  ret <8 x i64> %res
+}
+
+define <16 x i64> @test_expandload_v16i64(ptr %base, <16 x i1> %mask, <16 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v16i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; RV32-NEXT:    ret
+  %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> %mask, <16 x i64> %passthru)
+  ret <16 x i64> %res
+}
+
+define <16 x i64> @test_expandload_v16i64_all_ones(ptr %base, <16 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v16i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v16i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    ret
+  %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> splat (i1 true), <16 x i64> %passthru)
+  ret <16 x i64> %res
+}
+
+define <32 x i64> @test_expandload_v32i64(ptr %base, <32 x i1> %mask, <32 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v32i64:
+; RV64:       # %bb.0:
+; RV64-NEXT:    addi sp, sp, -16
+; RV64-NEXT:    .cfi_def_cfa_offset 16
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 4
+; RV64-NEXT:    sub sp, sp, a1
+; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; RV64-NEXT:    csrr a1, vlenb
+; RV64-NEXT:    slli a1, a1, 3
+; RV64-NEXT:    add a1, sp, a1
+; RV64-NEXT:    addi a1, a1, 16
+; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV64-NEXT:    vmv1r.v v7, v0
+; RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; RV64-NEXT:    vslidedown.vi v0, v0, 2
+; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; RV64-NEXT:    addi a1, sp, 16
+; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; RV64-NEXT:    vmv.x.s a1, v7
+; RV64-NEXT:    zext.h a1, a1
+; RV64-NEXT:    cpopw a1, a1
+; RV64-NEXT:    slli a1, a1, 3
+; RV64-NEXT:    add a1, a0, a1
+; RV64-NEXT:    csrr a2, vlenb
+; RV64-NEXT:    slli a2, a2, 3
+; RV64-NEXT:    add a2, sp, a2
+; RV64-NEXT:    addi a2, a2, 16
+; RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; RV64-NEXT:    addi a2, sp, 16
+; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
+; RV64-NEXT:    viota.m v24, v7
+; RV64-NEXT:    vmv1r.v v0, v7
+; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
+; RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
+; RV64-NEXT:    csrr a0, vlenb
+; RV64-NEXT:    slli a0, a0, 4
+; RV64-NEXT:    add sp, sp, a0
+; RV64-NEXT:    addi sp, sp, 16
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i64:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vmv1r.v v24, v0
+; RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; RV32-NEXT:    vslidedown.vi v0, v0, 2
+; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; RV32-NEXT:    viota.m v28, v0
+; RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; RV32-NEXT:    vmv.x.s a1, v24
+; RV32-NEXT:    zext.h a1, a1
+; RV32-NEXT:    cpop a1, a1
+; RV32-NEXT:    slli a1, a1, 3
+; RV32-NEXT:    add a1, a0, a1
+; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
+; RV32-NEXT:    viota.m v28, v24
+; RV32-NEXT:    vmv1r.v v0, v24
+; RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
+; RV32-NEXT:    ret
+  %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> %mask, <32 x i64> %passthru)
+  ret <32 x i64> %res
+}
+
+define <32 x i64> @test_expandload_v32i64_all_ones(ptr %base, <32 x i64> %passthru) {
+; RV64-LABEL: test_expandload_v32i64_all_ones:
+; RV64:       # %bb.0:
+; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV64-NEXT:    vle64.v v8, (a0)
+; RV64-NEXT:    addi a0, a0, 128
+; RV64-NEXT:    vle64.v v16, (a0)
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v32i64_all_ones:
+; RV32:       # %bb.0:
+; RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; RV32-NEXT:    vle64.v v8, (a0)
+; RV32-NEXT:    addi a0, a0, 128
+; RV32-NEXT:    vle64.v v16, (a0)
+; RV32-NEXT:    ret
+  %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> splat (i1 true), <32 x i64> %passthru)
+  ret <32 x i64> %res
+}
+
+declare <1 x i64> @llvm.masked.expandload.v1i64(ptr, <1 x i1>, <1 x i64>)
+declare <2 x i64> @llvm.masked.expandload.v2i64(ptr, <2 x i1>, <2 x i64>)
+declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
+declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
+declare <16 x i64> @llvm.masked.expandload.v16i64(ptr, <16 x i1>, <16 x i64>)
+declare <32 x i64> @llvm.masked.expandload.v32i64(ptr, <32 x i1>, <32 x i64>)
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
index 8b31166e313deb..a83036b2f9342f 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
@@ -6,24 +6,20 @@ declare <1 x half> @llvm.masked.expandload.v1f16(ptr, <1 x i1>, <1 x half>)
 define <1 x half> @expandload_v1f16(ptr %base, <1 x half> %src0, <1 x i1> %mask) {
 ; RV32-LABEL: expandload_v1f16:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV32-NEXT:    vfirst.m a1, v0
-; RV32-NEXT:    bnez a1, .LBB0_2
-; RV32-NEXT:  # %bb.1: # %cond.load
 ; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:  .LBB0_2: # %else
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v1f16:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV64-NEXT:    vfirst.m a1, v0
-; RV64-NEXT:    bnez a1, .LBB0_2
-; RV64-NEXT:  # %bb.1: # %cond.load
 ; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:  .LBB0_2: # %else
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <1 x half> @llvm.masked.expandload.v1f16(ptr align 2 %base, <1 x i1> %mask, <1 x half> %src0)
   ret <1 x half>%res
@@ -33,54 +29,20 @@ declare <2 x half> @llvm.masked.expandload.v2f16(ptr, <2 x i1>, <2 x half>)
 define <2 x half> @expandload_v2f16(ptr %base, <2 x half> %src0, <2 x i1> %mask) {
 ; RV32-LABEL: expandload_v2f16:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB1_3
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    bnez a1, .LBB1_4
-; RV32-NEXT:  .LBB1_2: # %else2
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB1_3: # %cond.load
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    beqz a1, .LBB1_2
-; RV32-NEXT:  .LBB1_4: # %cond.load1
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
 ; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v2f16:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB1_3
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    bnez a1, .LBB1_4
-; RV64-NEXT:  .LBB1_2: # %else2
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB1_3: # %cond.load
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    beqz a1, .LBB1_2
-; RV64-NEXT:  .LBB1_4: # %cond.load1
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
 ; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <2 x half> @llvm.masked.expandload.v2f16(ptr align 2 %base, <2 x i1> %mask, <2 x half> %src0)
   ret <2 x half>%res
@@ -90,98 +52,20 @@ declare <4 x half> @llvm.masked.expandload.v4f16(ptr, <4 x i1>, <4 x half>)
 define <4 x half> @expandload_v4f16(ptr %base, <4 x half> %src0, <4 x i1> %mask) {
 ; RV32-LABEL: expandload_v4f16:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB2_5
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB2_6
-; RV32-NEXT:  .LBB2_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB2_7
-; RV32-NEXT:  .LBB2_3: # %else6
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    bnez a1, .LBB2_8
-; RV32-NEXT:  .LBB2_4: # %else10
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB2_5: # %cond.load
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB2_2
-; RV32-NEXT:  .LBB2_6: # %cond.load1
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vsetivli zero, 2, e16, mf2, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB2_3
-; RV32-NEXT:  .LBB2_7: # %cond.load5
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e16, mf2, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 2
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    beqz a1, .LBB2_4
-; RV32-NEXT:  .LBB2_8: # %cond.load9
-; RV32-NEXT:    flh fa5, 0(a0)
 ; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 3
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v4f16:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB2_5
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB2_6
-; RV64-NEXT:  .LBB2_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB2_7
-; RV64-NEXT:  .LBB2_3: # %else6
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    bnez a1, .LBB2_8
-; RV64-NEXT:  .LBB2_4: # %else10
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB2_5: # %cond.load
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB2_2
-; RV64-NEXT:  .LBB2_6: # %cond.load1
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vsetivli zero, 2, e16, mf2, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB2_3
-; RV64-NEXT:  .LBB2_7: # %cond.load5
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e16, mf2, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 2
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    beqz a1, .LBB2_4
-; RV64-NEXT:  .LBB2_8: # %cond.load9
-; RV64-NEXT:    flh fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 3
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <4 x half> @llvm.masked.expandload.v4f16(ptr align 2 %base, <4 x i1> %mask, <4 x half> %src0)
   ret <4 x half>%res
@@ -191,186 +75,20 @@ declare <8 x half> @llvm.masked.expandload.v8f16(ptr, <8 x i1>, <8 x half>)
 define <8 x half> @expandload_v8f16(ptr %base, <8 x half> %src0, <8 x i1> %mask) {
 ; RV32-LABEL: expandload_v8f16:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB3_9
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB3_10
-; RV32-NEXT:  .LBB3_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB3_11
-; RV32-NEXT:  .LBB3_3: # %else6
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    bnez a2, .LBB3_12
-; RV32-NEXT:  .LBB3_4: # %else10
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    bnez a2, .LBB3_13
-; RV32-NEXT:  .LBB3_5: # %else14
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    bnez a2, .LBB3_14
-; RV32-NEXT:  .LBB3_6: # %else18
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    bnez a2, .LBB3_15
-; RV32-NEXT:  .LBB3_7: # %else22
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    bnez a1, .LBB3_16
-; RV32-NEXT:  .LBB3_8: # %else26
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB3_9: # %cond.load
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB3_2
-; RV32-NEXT:  .LBB3_10: # %cond.load1
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vsetivli zero, 2, e16, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB3_3
-; RV32-NEXT:  .LBB3_11: # %cond.load5
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e16, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 2
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    beqz a2, .LBB3_4
-; RV32-NEXT:  .LBB3_12: # %cond.load9
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 4, e16, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 3
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    beqz a2, .LBB3_5
-; RV32-NEXT:  .LBB3_13: # %cond.load13
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 5, e16, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 4
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    beqz a2, .LBB3_6
-; RV32-NEXT:  .LBB3_14: # %cond.load17
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 6, e16, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 5
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    beqz a2, .LBB3_7
-; RV32-NEXT:  .LBB3_15: # %cond.load21
-; RV32-NEXT:    flh fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 7, e16, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 6
-; RV32-NEXT:    addi a0, a0, 2
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    beqz a1, .LBB3_8
-; RV32-NEXT:  .LBB3_16: # %cond.load25
-; RV32-NEXT:    flh fa5, 0(a0)
 ; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 7
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v8f16:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB3_9
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB3_10
-; RV64-NEXT:  .LBB3_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB3_11
-; RV64-NEXT:  .LBB3_3: # %else6
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    bnez a2, .LBB3_12
-; RV64-NEXT:  .LBB3_4: # %else10
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    bnez a2, .LBB3_13
-; RV64-NEXT:  .LBB3_5: # %else14
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    bnez a2, .LBB3_14
-; RV64-NEXT:  .LBB3_6: # %else18
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    bnez a2, .LBB3_15
-; RV64-NEXT:  .LBB3_7: # %else22
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    bnez a1, .LBB3_16
-; RV64-NEXT:  .LBB3_8: # %else26
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB3_9: # %cond.load
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB3_2
-; RV64-NEXT:  .LBB3_10: # %cond.load1
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vsetivli zero, 2, e16, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB3_3
-; RV64-NEXT:  .LBB3_11: # %cond.load5
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e16, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 2
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    beqz a2, .LBB3_4
-; RV64-NEXT:  .LBB3_12: # %cond.load9
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 4, e16, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 3
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    beqz a2, .LBB3_5
-; RV64-NEXT:  .LBB3_13: # %cond.load13
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 5, e16, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 4
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    beqz a2, .LBB3_6
-; RV64-NEXT:  .LBB3_14: # %cond.load17
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 6, e16, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 5
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    beqz a2, .LBB3_7
-; RV64-NEXT:  .LBB3_15: # %cond.load21
-; RV64-NEXT:    flh fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 7, e16, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 6
-; RV64-NEXT:    addi a0, a0, 2
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    beqz a1, .LBB3_8
-; RV64-NEXT:  .LBB3_16: # %cond.load25
-; RV64-NEXT:    flh fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 7
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <8 x half> @llvm.masked.expandload.v8f16(ptr align 2 %base, <8 x i1> %mask, <8 x half> %src0)
   ret <8 x half>%res
@@ -380,24 +98,20 @@ declare <1 x float> @llvm.masked.expandload.v1f32(ptr, <1 x i1>, <1 x float>)
 define <1 x float> @expandload_v1f32(ptr %base, <1 x float> %src0, <1 x i1> %mask) {
 ; RV32-LABEL: expandload_v1f32:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV32-NEXT:    vfirst.m a1, v0
-; RV32-NEXT:    bnez a1, .LBB4_2
-; RV32-NEXT:  # %bb.1: # %cond.load
 ; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:  .LBB4_2: # %else
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v1f32:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV64-NEXT:    vfirst.m a1, v0
-; RV64-NEXT:    bnez a1, .LBB4_2
-; RV64-NEXT:  # %bb.1: # %cond.load
 ; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:  .LBB4_2: # %else
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <1 x float> @llvm.masked.expandload.v1f32(ptr align 4 %base, <1 x i1> %mask, <1 x float> %src0)
   ret <1 x float>%res
@@ -407,54 +121,20 @@ declare <2 x float> @llvm.masked.expandload.v2f32(ptr, <2 x i1>, <2 x float>)
 define <2 x float> @expandload_v2f32(ptr %base, <2 x float> %src0, <2 x i1> %mask) {
 ; RV32-LABEL: expandload_v2f32:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB5_3
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    bnez a1, .LBB5_4
-; RV32-NEXT:  .LBB5_2: # %else2
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB5_3: # %cond.load
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    beqz a1, .LBB5_2
-; RV32-NEXT:  .LBB5_4: # %cond.load1
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
 ; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v2f32:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB5_3
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    bnez a1, .LBB5_4
-; RV64-NEXT:  .LBB5_2: # %else2
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB5_3: # %cond.load
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    beqz a1, .LBB5_2
-; RV64-NEXT:  .LBB5_4: # %cond.load1
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
 ; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <2 x float> @llvm.masked.expandload.v2f32(ptr align 4 %base, <2 x i1> %mask, <2 x float> %src0)
   ret <2 x float>%res
@@ -464,98 +144,20 @@ declare <4 x float> @llvm.masked.expandload.v4f32(ptr, <4 x i1>, <4 x float>)
 define <4 x float> @expandload_v4f32(ptr %base, <4 x float> %src0, <4 x i1> %mask) {
 ; RV32-LABEL: expandload_v4f32:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB6_5
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB6_6
-; RV32-NEXT:  .LBB6_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB6_7
-; RV32-NEXT:  .LBB6_3: # %else6
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    bnez a1, .LBB6_8
-; RV32-NEXT:  .LBB6_4: # %else10
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB6_5: # %cond.load
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB6_2
-; RV32-NEXT:  .LBB6_6: # %cond.load1
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB6_3
-; RV32-NEXT:  .LBB6_7: # %cond.load5
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 2
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    beqz a1, .LBB6_4
-; RV32-NEXT:  .LBB6_8: # %cond.load9
-; RV32-NEXT:    flw fa5, 0(a0)
 ; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vslideup.vi v8, v9, 3
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v4f32:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB6_5
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB6_6
-; RV64-NEXT:  .LBB6_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB6_7
-; RV64-NEXT:  .LBB6_3: # %else6
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    bnez a1, .LBB6_8
-; RV64-NEXT:  .LBB6_4: # %else10
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB6_5: # %cond.load
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB6_2
-; RV64-NEXT:  .LBB6_6: # %cond.load1
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB6_3
-; RV64-NEXT:  .LBB6_7: # %cond.load5
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 2
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    beqz a1, .LBB6_4
-; RV64-NEXT:  .LBB6_8: # %cond.load9
-; RV64-NEXT:    flw fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
-; RV64-NEXT:    vslideup.vi v8, v9, 3
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <4 x float> @llvm.masked.expandload.v4f32(ptr align 4 %base, <4 x i1> %mask, <4 x float> %src0)
   ret <4 x float>%res
@@ -565,186 +167,20 @@ declare <8 x float> @llvm.masked.expandload.v8f32(ptr, <8 x i1>, <8 x float>)
 define <8 x float> @expandload_v8f32(ptr %base, <8 x float> %src0, <8 x i1> %mask) {
 ; RV32-LABEL: expandload_v8f32:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB7_9
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB7_10
-; RV32-NEXT:  .LBB7_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB7_11
-; RV32-NEXT:  .LBB7_3: # %else6
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    bnez a2, .LBB7_12
-; RV32-NEXT:  .LBB7_4: # %else10
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    bnez a2, .LBB7_13
-; RV32-NEXT:  .LBB7_5: # %else14
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    bnez a2, .LBB7_14
-; RV32-NEXT:  .LBB7_6: # %else18
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    bnez a2, .LBB7_15
-; RV32-NEXT:  .LBB7_7: # %else22
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    bnez a1, .LBB7_16
-; RV32-NEXT:  .LBB7_8: # %else26
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB7_9: # %cond.load
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB7_2
-; RV32-NEXT:  .LBB7_10: # %cond.load1
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v10, 1
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB7_3
-; RV32-NEXT:  .LBB7_11: # %cond.load5
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 2
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    beqz a2, .LBB7_4
-; RV32-NEXT:  .LBB7_12: # %cond.load9
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 3
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    beqz a2, .LBB7_5
-; RV32-NEXT:  .LBB7_13: # %cond.load13
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 5, e32, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 4
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    beqz a2, .LBB7_6
-; RV32-NEXT:  .LBB7_14: # %cond.load17
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 6, e32, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 5
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    beqz a2, .LBB7_7
-; RV32-NEXT:  .LBB7_15: # %cond.load21
-; RV32-NEXT:    flw fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 7, e32, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 6
-; RV32-NEXT:    addi a0, a0, 4
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    beqz a1, .LBB7_8
-; RV32-NEXT:  .LBB7_16: # %cond.load25
-; RV32-NEXT:    flw fa5, 0(a0)
 ; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 7
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 2, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v8f32:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB7_9
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB7_10
-; RV64-NEXT:  .LBB7_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB7_11
-; RV64-NEXT:  .LBB7_3: # %else6
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    bnez a2, .LBB7_12
-; RV64-NEXT:  .LBB7_4: # %else10
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    bnez a2, .LBB7_13
-; RV64-NEXT:  .LBB7_5: # %else14
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    bnez a2, .LBB7_14
-; RV64-NEXT:  .LBB7_6: # %else18
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    bnez a2, .LBB7_15
-; RV64-NEXT:  .LBB7_7: # %else22
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    bnez a1, .LBB7_16
-; RV64-NEXT:  .LBB7_8: # %else26
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB7_9: # %cond.load
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB7_2
-; RV64-NEXT:  .LBB7_10: # %cond.load1
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v10, 1
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB7_3
-; RV64-NEXT:  .LBB7_11: # %cond.load5
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 2
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    beqz a2, .LBB7_4
-; RV64-NEXT:  .LBB7_12: # %cond.load9
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 4, e32, m1, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 3
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    beqz a2, .LBB7_5
-; RV64-NEXT:  .LBB7_13: # %cond.load13
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 5, e32, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 4
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    beqz a2, .LBB7_6
-; RV64-NEXT:  .LBB7_14: # %cond.load17
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 6, e32, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 5
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    beqz a2, .LBB7_7
-; RV64-NEXT:  .LBB7_15: # %cond.load21
-; RV64-NEXT:    flw fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 7, e32, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 6
-; RV64-NEXT:    addi a0, a0, 4
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    beqz a1, .LBB7_8
-; RV64-NEXT:  .LBB7_16: # %cond.load25
-; RV64-NEXT:    flw fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 7
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 2, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; RV64-NEXT:    vluxei32.v v8, (a0), v10, v0.t
 ; RV64-NEXT:    ret
   %res = call <8 x float> @llvm.masked.expandload.v8f32(ptr align 4 %base, <8 x i1> %mask, <8 x float> %src0)
   ret <8 x float>%res
@@ -754,24 +190,20 @@ declare <1 x double> @llvm.masked.expandload.v1f64(ptr, <1 x i1>, <1 x double>)
 define <1 x double> @expandload_v1f64(ptr %base, <1 x double> %src0, <1 x i1> %mask) {
 ; RV32-LABEL: expandload_v1f64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV32-NEXT:    vfirst.m a1, v0
-; RV32-NEXT:    bnez a1, .LBB8_2
-; RV32-NEXT:  # %bb.1: # %cond.load
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:  .LBB8_2: # %else
+; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v1f64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV64-NEXT:    vfirst.m a1, v0
-; RV64-NEXT:    bnez a1, .LBB8_2
-; RV64-NEXT:  # %bb.1: # %cond.load
 ; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:  .LBB8_2: # %else
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <1 x double> @llvm.masked.expandload.v1f64(ptr align 8 %base, <1 x i1> %mask, <1 x double> %src0)
   ret <1 x double>%res
@@ -781,54 +213,20 @@ declare <2 x double> @llvm.masked.expandload.v2f64(ptr, <2 x i1>, <2 x double>)
 define <2 x double> @expandload_v2f64(ptr %base, <2 x double> %src0, <2 x i1> %mask) {
 ; RV32-LABEL: expandload_v2f64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB9_3
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    bnez a1, .LBB9_4
-; RV32-NEXT:  .LBB9_2: # %else2
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB9_3: # %cond.load
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    beqz a1, .LBB9_2
-; RV32-NEXT:  .LBB9_4: # %cond.load1
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV32-NEXT:    vfmv.s.f v9, fa5
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
+; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v2f64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB9_3
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    bnez a1, .LBB9_4
-; RV64-NEXT:  .LBB9_2: # %else2
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB9_3: # %cond.load
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    beqz a1, .LBB9_2
-; RV64-NEXT:  .LBB9_4: # %cond.load1
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vfmv.s.f v9, fa5
 ; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <2 x double> @llvm.masked.expandload.v2f64(ptr align 8 %base, <2 x i1> %mask, <2 x double> %src0)
   ret <2 x double>%res
@@ -838,98 +236,20 @@ declare <4 x double> @llvm.masked.expandload.v4f64(ptr, <4 x i1>, <4 x double>)
 define <4 x double> @expandload_v4f64(ptr %base, <4 x double> %src0, <4 x i1> %mask) {
 ; RV32-LABEL: expandload_v4f64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB10_5
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB10_6
-; RV32-NEXT:  .LBB10_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB10_7
-; RV32-NEXT:  .LBB10_3: # %else6
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    bnez a1, .LBB10_8
-; RV32-NEXT:  .LBB10_4: # %else10
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB10_5: # %cond.load
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB10_2
-; RV32-NEXT:  .LBB10_6: # %cond.load1
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v10, 1
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB10_3
-; RV32-NEXT:  .LBB10_7: # %cond.load5
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 2
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    beqz a1, .LBB10_4
-; RV32-NEXT:  .LBB10_8: # %cond.load9
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV32-NEXT:    vfmv.s.f v10, fa5
-; RV32-NEXT:    vslideup.vi v8, v10, 3
+; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v4f64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB10_5
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB10_6
-; RV64-NEXT:  .LBB10_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB10_7
-; RV64-NEXT:  .LBB10_3: # %else6
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    bnez a1, .LBB10_8
-; RV64-NEXT:  .LBB10_4: # %else10
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB10_5: # %cond.load
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB10_2
-; RV64-NEXT:  .LBB10_6: # %cond.load1
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v10, 1
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB10_3
-; RV64-NEXT:  .LBB10_7: # %cond.load5
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    beqz a1, .LBB10_4
-; RV64-NEXT:  .LBB10_8: # %cond.load9
-; RV64-NEXT:    fld fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    vfmv.s.f v10, fa5
-; RV64-NEXT:    vslideup.vi v8, v10, 3
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
 ; RV64-NEXT:    ret
   %res = call <4 x double> @llvm.masked.expandload.v4f64(ptr align 8 %base, <4 x i1> %mask, <4 x double> %src0)
   ret <4 x double>%res
@@ -939,186 +259,20 @@ declare <8 x double> @llvm.masked.expandload.v8f64(ptr, <8 x i1>, <8 x double>)
 define <8 x double> @expandload_v8f64(ptr %base, <8 x double> %src0, <8 x i1> %mask) {
 ; RV32-LABEL: expandload_v8f64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB11_9
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB11_10
-; RV32-NEXT:  .LBB11_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB11_11
-; RV32-NEXT:  .LBB11_3: # %else6
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    bnez a2, .LBB11_12
-; RV32-NEXT:  .LBB11_4: # %else10
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    bnez a2, .LBB11_13
-; RV32-NEXT:  .LBB11_5: # %else14
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    bnez a2, .LBB11_14
-; RV32-NEXT:  .LBB11_6: # %else18
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    bnez a2, .LBB11_15
-; RV32-NEXT:  .LBB11_7: # %else22
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    bnez a1, .LBB11_16
-; RV32-NEXT:  .LBB11_8: # %else26
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB11_9: # %cond.load
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV32-NEXT:    vfmv.s.f v8, fa5
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB11_2
-; RV32-NEXT:  .LBB11_10: # %cond.load1
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 1
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB11_3
-; RV32-NEXT:  .LBB11_11: # %cond.load5
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 2
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    beqz a2, .LBB11_4
-; RV32-NEXT:  .LBB11_12: # %cond.load9
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 4, e64, m2, tu, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 3
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    beqz a2, .LBB11_5
-; RV32-NEXT:  .LBB11_13: # %cond.load13
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 5, e64, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 4
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    beqz a2, .LBB11_6
-; RV32-NEXT:  .LBB11_14: # %cond.load17
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 6, e64, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 5
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    beqz a2, .LBB11_7
-; RV32-NEXT:  .LBB11_15: # %cond.load21
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 7, e64, m4, tu, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 6
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    beqz a1, .LBB11_8
-; RV32-NEXT:  .LBB11_16: # %cond.load25
-; RV32-NEXT:    fld fa5, 0(a0)
-; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV32-NEXT:    vfmv.s.f v12, fa5
-; RV32-NEXT:    vslideup.vi v8, v12, 7
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v8f64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB11_9
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB11_10
-; RV64-NEXT:  .LBB11_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB11_11
-; RV64-NEXT:  .LBB11_3: # %else6
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    bnez a2, .LBB11_12
-; RV64-NEXT:  .LBB11_4: # %else10
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    bnez a2, .LBB11_13
-; RV64-NEXT:  .LBB11_5: # %else14
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    bnez a2, .LBB11_14
-; RV64-NEXT:  .LBB11_6: # %else18
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    bnez a2, .LBB11_15
-; RV64-NEXT:  .LBB11_7: # %else22
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    bnez a1, .LBB11_16
-; RV64-NEXT:  .LBB11_8: # %else26
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB11_9: # %cond.load
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vfmv.s.f v8, fa5
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB11_2
-; RV64-NEXT:  .LBB11_10: # %cond.load1
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v12, 1
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB11_3
-; RV64-NEXT:  .LBB11_11: # %cond.load5
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    beqz a2, .LBB11_4
-; RV64-NEXT:  .LBB11_12: # %cond.load9
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, tu, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 3
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    beqz a2, .LBB11_5
-; RV64-NEXT:  .LBB11_13: # %cond.load13
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 5, e64, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 4
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    beqz a2, .LBB11_6
-; RV64-NEXT:  .LBB11_14: # %cond.load17
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 6, e64, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 5
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    beqz a2, .LBB11_7
-; RV64-NEXT:  .LBB11_15: # %cond.load21
-; RV64-NEXT:    fld fa5, 0(a0)
-; RV64-NEXT:    vsetivli zero, 7, e64, m4, tu, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 6
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    beqz a1, .LBB11_8
-; RV64-NEXT:  .LBB11_16: # %cond.load25
-; RV64-NEXT:    fld fa5, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    vfmv.s.f v12, fa5
-; RV64-NEXT:    vslideup.vi v8, v12, 7
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
 ; RV64-NEXT:    ret
   %res = call <8 x double> @llvm.masked.expandload.v8f64(ptr align 8 %base, <8 x i1> %mask, <8 x double> %src0)
   ret <8 x double>%res
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
index d34235127e8389..64ae0137beb85c 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
@@ -6,13 +6,9 @@ declare <1 x i8> @llvm.masked.expandload.v1i8(ptr, <1 x i1>, <1 x i8>)
 define <1 x i8> @expandload_v1i8(ptr %base, <1 x i8> %src0, <1 x i1> %mask) {
 ; CHECK-LABEL: expandload_v1i8:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; CHECK-NEXT:    vfirst.m a1, v0
-; CHECK-NEXT:    bnez a1, .LBB0_2
-; CHECK-NEXT:  # %bb.1: # %cond.load
-; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-NEXT:    vle8.v v8, (a0)
-; CHECK-NEXT:  .LBB0_2: # %else
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %src0)
   ret <1 x i8>%res
@@ -22,28 +18,9 @@ declare <2 x i8> @llvm.masked.expandload.v2i8(ptr, <2 x i1>, <2 x i8>)
 define <2 x i8> @expandload_v2i8(ptr %base, <2 x i8> %src0, <2 x i1> %mask) {
 ; CHECK-LABEL: expandload_v2i8:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB1_3
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    bnez a1, .LBB1_4
-; CHECK-NEXT:  .LBB1_2: # %else2
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB1_3: # %cond.load
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    beqz a1, .LBB1_2
-; CHECK-NEXT:  .LBB1_4: # %cond.load1
-; CHECK-NEXT:    lbu a0, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %src0)
   ret <2 x i8>%res
@@ -53,50 +30,9 @@ declare <4 x i8> @llvm.masked.expandload.v4i8(ptr, <4 x i1>, <4 x i8>)
 define <4 x i8> @expandload_v4i8(ptr %base, <4 x i8> %src0, <4 x i1> %mask) {
 ; CHECK-LABEL: expandload_v4i8:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB2_5
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB2_6
-; CHECK-NEXT:  .LBB2_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB2_7
-; CHECK-NEXT:  .LBB2_3: # %else6
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    bnez a1, .LBB2_8
-; CHECK-NEXT:  .LBB2_4: # %else10
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB2_5: # %cond.load
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB2_2
-; CHECK-NEXT:  .LBB2_6: # %cond.load1
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vsetivli zero, 2, e8, mf4, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB2_3
-; CHECK-NEXT:  .LBB2_7: # %cond.load5
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e8, mf4, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 2
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    beqz a1, .LBB2_4
-; CHECK-NEXT:  .LBB2_8: # %cond.load9
-; CHECK-NEXT:    lbu a0, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vslideup.vi v8, v9, 3
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %src0)
   ret <4 x i8>%res
@@ -106,94 +42,9 @@ declare <8 x i8> @llvm.masked.expandload.v8i8(ptr, <8 x i1>, <8 x i8>)
 define <8 x i8> @expandload_v8i8(ptr %base, <8 x i8> %src0, <8 x i1> %mask) {
 ; CHECK-LABEL: expandload_v8i8:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB3_9
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB3_10
-; CHECK-NEXT:  .LBB3_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB3_11
-; CHECK-NEXT:  .LBB3_3: # %else6
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    bnez a2, .LBB3_12
-; CHECK-NEXT:  .LBB3_4: # %else10
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    bnez a2, .LBB3_13
-; CHECK-NEXT:  .LBB3_5: # %else14
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    bnez a2, .LBB3_14
-; CHECK-NEXT:  .LBB3_6: # %else18
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    bnez a2, .LBB3_15
-; CHECK-NEXT:  .LBB3_7: # %else22
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    bnez a1, .LBB3_16
-; CHECK-NEXT:  .LBB3_8: # %else26
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB3_9: # %cond.load
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB3_2
-; CHECK-NEXT:  .LBB3_10: # %cond.load1
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vsetivli zero, 2, e8, mf2, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB3_3
-; CHECK-NEXT:  .LBB3_11: # %cond.load5
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e8, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 2
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    beqz a2, .LBB3_4
-; CHECK-NEXT:  .LBB3_12: # %cond.load9
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 4, e8, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 3
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    beqz a2, .LBB3_5
-; CHECK-NEXT:  .LBB3_13: # %cond.load13
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 5, e8, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 4
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    beqz a2, .LBB3_6
-; CHECK-NEXT:  .LBB3_14: # %cond.load17
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 6, e8, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 5
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    beqz a2, .LBB3_7
-; CHECK-NEXT:  .LBB3_15: # %cond.load21
-; CHECK-NEXT:    lbu a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 7, e8, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 6
-; CHECK-NEXT:    addi a0, a0, 1
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    beqz a1, .LBB3_8
-; CHECK-NEXT:  .LBB3_16: # %cond.load25
-; CHECK-NEXT:    lbu a0, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vslideup.vi v8, v9, 7
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %src0)
   ret <8 x i8>%res
@@ -203,13 +54,11 @@ declare <1 x i16> @llvm.masked.expandload.v1i16(ptr, <1 x i1>, <1 x i16>)
 define <1 x i16> @expandload_v1i16(ptr %base, <1 x i16> %src0, <1 x i1> %mask) {
 ; CHECK-LABEL: expandload_v1i16:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; CHECK-NEXT:    vfirst.m a1, v0
-; CHECK-NEXT:    bnez a1, .LBB4_2
-; CHECK-NEXT:  # %bb.1: # %cond.load
 ; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-NEXT:    vle16.v v8, (a0)
-; CHECK-NEXT:  .LBB4_2: # %else
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %src0)
   ret <1 x i16>%res
@@ -219,28 +68,11 @@ declare <2 x i16> @llvm.masked.expandload.v2i16(ptr, <2 x i1>, <2 x i16>)
 define <2 x i16> @expandload_v2i16(ptr %base, <2 x i16> %src0, <2 x i1> %mask) {
 ; CHECK-LABEL: expandload_v2i16:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB5_3
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    bnez a1, .LBB5_4
-; CHECK-NEXT:  .LBB5_2: # %else2
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB5_3: # %cond.load
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    beqz a1, .LBB5_2
-; CHECK-NEXT:  .LBB5_4: # %cond.load1
-; CHECK-NEXT:    lh a0, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
 ; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %src0)
   ret <2 x i16>%res
@@ -250,50 +82,11 @@ declare <4 x i16> @llvm.masked.expandload.v4i16(ptr, <4 x i1>, <4 x i16>)
 define <4 x i16> @expandload_v4i16(ptr %base, <4 x i16> %src0, <4 x i1> %mask) {
 ; CHECK-LABEL: expandload_v4i16:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB6_5
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB6_6
-; CHECK-NEXT:  .LBB6_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB6_7
-; CHECK-NEXT:  .LBB6_3: # %else6
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    bnez a1, .LBB6_8
-; CHECK-NEXT:  .LBB6_4: # %else10
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB6_5: # %cond.load
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB6_2
-; CHECK-NEXT:  .LBB6_6: # %cond.load1
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vsetivli zero, 2, e16, mf2, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB6_3
-; CHECK-NEXT:  .LBB6_7: # %cond.load5
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e16, mf2, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 2
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    beqz a1, .LBB6_4
-; CHECK-NEXT:  .LBB6_8: # %cond.load9
-; CHECK-NEXT:    lh a0, 0(a0)
 ; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vslideup.vi v8, v9, 3
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %src0)
   ret <4 x i16>%res
@@ -303,94 +96,11 @@ declare <8 x i16> @llvm.masked.expandload.v8i16(ptr, <8 x i1>, <8 x i16>)
 define <8 x i16> @expandload_v8i16(ptr %base, <8 x i16> %src0, <8 x i1> %mask) {
 ; CHECK-LABEL: expandload_v8i16:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB7_9
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB7_10
-; CHECK-NEXT:  .LBB7_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB7_11
-; CHECK-NEXT:  .LBB7_3: # %else6
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    bnez a2, .LBB7_12
-; CHECK-NEXT:  .LBB7_4: # %else10
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    bnez a2, .LBB7_13
-; CHECK-NEXT:  .LBB7_5: # %else14
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    bnez a2, .LBB7_14
-; CHECK-NEXT:  .LBB7_6: # %else18
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    bnez a2, .LBB7_15
-; CHECK-NEXT:  .LBB7_7: # %else22
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    bnez a1, .LBB7_16
-; CHECK-NEXT:  .LBB7_8: # %else26
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB7_9: # %cond.load
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB7_2
-; CHECK-NEXT:  .LBB7_10: # %cond.load1
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vsetivli zero, 2, e16, m1, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB7_3
-; CHECK-NEXT:  .LBB7_11: # %cond.load5
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e16, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 2
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    beqz a2, .LBB7_4
-; CHECK-NEXT:  .LBB7_12: # %cond.load9
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 4, e16, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 3
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    beqz a2, .LBB7_5
-; CHECK-NEXT:  .LBB7_13: # %cond.load13
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 5, e16, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 4
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    beqz a2, .LBB7_6
-; CHECK-NEXT:  .LBB7_14: # %cond.load17
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 6, e16, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 5
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    beqz a2, .LBB7_7
-; CHECK-NEXT:  .LBB7_15: # %cond.load21
-; CHECK-NEXT:    lh a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 7, e16, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 6
-; CHECK-NEXT:    addi a0, a0, 2
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    beqz a1, .LBB7_8
-; CHECK-NEXT:  .LBB7_16: # %cond.load25
-; CHECK-NEXT:    lh a0, 0(a0)
 ; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vslideup.vi v8, v9, 7
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %src0)
   ret <8 x i16>%res
@@ -400,13 +110,11 @@ declare <1 x i32> @llvm.masked.expandload.v1i32(ptr, <1 x i1>, <1 x i32>)
 define <1 x i32> @expandload_v1i32(ptr %base, <1 x i32> %src0, <1 x i1> %mask) {
 ; CHECK-LABEL: expandload_v1i32:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; CHECK-NEXT:    vfirst.m a1, v0
-; CHECK-NEXT:    bnez a1, .LBB8_2
-; CHECK-NEXT:  # %bb.1: # %cond.load
 ; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-NEXT:    vle32.v v8, (a0)
-; CHECK-NEXT:  .LBB8_2: # %else
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %src0)
   ret <1 x i32>%res
@@ -416,28 +124,11 @@ declare <2 x i32> @llvm.masked.expandload.v2i32(ptr, <2 x i1>, <2 x i32>)
 define <2 x i32> @expandload_v2i32(ptr %base, <2 x i32> %src0, <2 x i1> %mask) {
 ; CHECK-LABEL: expandload_v2i32:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB9_3
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    bnez a1, .LBB9_4
-; CHECK-NEXT:  .LBB9_2: # %else2
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB9_3: # %cond.load
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a1, a1, 2
-; CHECK-NEXT:    beqz a1, .LBB9_2
-; CHECK-NEXT:  .LBB9_4: # %cond.load1
-; CHECK-NEXT:    lw a0, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
 ; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %src0)
   ret <2 x i32>%res
@@ -447,50 +138,11 @@ declare <4 x i32> @llvm.masked.expandload.v4i32(ptr, <4 x i1>, <4 x i32>)
 define <4 x i32> @expandload_v4i32(ptr %base, <4 x i32> %src0, <4 x i1> %mask) {
 ; CHECK-LABEL: expandload_v4i32:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB10_5
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB10_6
-; CHECK-NEXT:  .LBB10_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB10_7
-; CHECK-NEXT:  .LBB10_3: # %else6
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    bnez a1, .LBB10_8
-; CHECK-NEXT:  .LBB10_4: # %else10
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB10_5: # %cond.load
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB10_2
-; CHECK-NEXT:  .LBB10_6: # %cond.load1
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v9, 1
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB10_3
-; CHECK-NEXT:  .LBB10_7: # %cond.load5
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v9, a2
-; CHECK-NEXT:    vslideup.vi v8, v9, 2
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a1, a1, 8
-; CHECK-NEXT:    beqz a1, .LBB10_4
-; CHECK-NEXT:  .LBB10_8: # %cond.load9
-; CHECK-NEXT:    lw a0, 0(a0)
 ; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-NEXT:    vmv.s.x v9, a0
-; CHECK-NEXT:    vslideup.vi v8, v9, 3
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; CHECK-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %src0)
   ret <4 x i32>%res
@@ -500,94 +152,11 @@ declare <8 x i32> @llvm.masked.expandload.v8i32(ptr, <8 x i1>, <8 x i32>)
 define <8 x i32> @expandload_v8i32(ptr %base, <8 x i32> %src0, <8 x i1> %mask) {
 ; CHECK-LABEL: expandload_v8i32:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; CHECK-NEXT:    vmv.x.s a1, v0
-; CHECK-NEXT:    andi a2, a1, 1
-; CHECK-NEXT:    bnez a2, .LBB11_9
-; CHECK-NEXT:  # %bb.1: # %else
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    bnez a2, .LBB11_10
-; CHECK-NEXT:  .LBB11_2: # %else2
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    bnez a2, .LBB11_11
-; CHECK-NEXT:  .LBB11_3: # %else6
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    bnez a2, .LBB11_12
-; CHECK-NEXT:  .LBB11_4: # %else10
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    bnez a2, .LBB11_13
-; CHECK-NEXT:  .LBB11_5: # %else14
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    bnez a2, .LBB11_14
-; CHECK-NEXT:  .LBB11_6: # %else18
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    bnez a2, .LBB11_15
-; CHECK-NEXT:  .LBB11_7: # %else22
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    bnez a1, .LBB11_16
-; CHECK-NEXT:  .LBB11_8: # %else26
-; CHECK-NEXT:    ret
-; CHECK-NEXT:  .LBB11_9: # %cond.load
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, tu, ma
-; CHECK-NEXT:    vmv.s.x v8, a2
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 2
-; CHECK-NEXT:    beqz a2, .LBB11_2
-; CHECK-NEXT:  .LBB11_10: # %cond.load1
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; CHECK-NEXT:    vslideup.vi v8, v10, 1
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 4
-; CHECK-NEXT:    beqz a2, .LBB11_3
-; CHECK-NEXT:  .LBB11_11: # %cond.load5
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 3, e32, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vslideup.vi v8, v10, 2
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 8
-; CHECK-NEXT:    beqz a2, .LBB11_4
-; CHECK-NEXT:  .LBB11_12: # %cond.load9
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 4, e32, m1, tu, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vslideup.vi v8, v10, 3
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 16
-; CHECK-NEXT:    beqz a2, .LBB11_5
-; CHECK-NEXT:  .LBB11_13: # %cond.load13
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 5, e32, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vslideup.vi v8, v10, 4
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 32
-; CHECK-NEXT:    beqz a2, .LBB11_6
-; CHECK-NEXT:  .LBB11_14: # %cond.load17
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 6, e32, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vslideup.vi v8, v10, 5
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a2, a1, 64
-; CHECK-NEXT:    beqz a2, .LBB11_7
-; CHECK-NEXT:  .LBB11_15: # %cond.load21
-; CHECK-NEXT:    lw a2, 0(a0)
-; CHECK-NEXT:    vsetivli zero, 7, e32, m2, tu, ma
-; CHECK-NEXT:    vmv.s.x v10, a2
-; CHECK-NEXT:    vslideup.vi v8, v10, 6
-; CHECK-NEXT:    addi a0, a0, 4
-; CHECK-NEXT:    andi a1, a1, -128
-; CHECK-NEXT:    beqz a1, .LBB11_8
-; CHECK-NEXT:  .LBB11_16: # %cond.load25
-; CHECK-NEXT:    lw a0, 0(a0)
 ; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-NEXT:    vmv.s.x v10, a0
-; CHECK-NEXT:    vslideup.vi v8, v10, 7
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vsll.vi v10, v10, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v10, v0.t
 ; CHECK-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %src0)
   ret <8 x i32>%res
@@ -616,13 +185,11 @@ define <1 x i64> @expandload_v1i64(ptr %base, <1 x i64> %src0, <1 x i1> %mask) {
 ;
 ; RV64-LABEL: expandload_v1i64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV64-NEXT:    vfirst.m a1, v0
-; RV64-NEXT:    bnez a1, .LBB12_2
-; RV64-NEXT:  # %bb.1: # %cond.load
 ; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:  .LBB12_2: # %else
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %src0)
   ret <1 x i64>%res
@@ -632,58 +199,20 @@ declare <2 x i64> @llvm.masked.expandload.v2i64(ptr, <2 x i1>, <2 x i64>)
 define <2 x i64> @expandload_v2i64(ptr %base, <2 x i64> %src0, <2 x i1> %mask) {
 ; RV32-LABEL: expandload_v2i64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB13_3
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    bnez a1, .LBB13_4
-; RV32-NEXT:  .LBB13_2: # %else2
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB13_3: # %cond.load
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV32-NEXT:    vslide1down.vx v8, v8, a2
-; RV32-NEXT:    vslide1down.vx v8, v8, a3
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, 2
-; RV32-NEXT:    beqz a1, .LBB13_2
-; RV32-NEXT:  .LBB13_4: # %cond.load1
-; RV32-NEXT:    lw a1, 0(a0)
-; RV32-NEXT:    lw a0, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, ta, ma
-; RV32-NEXT:    vslide1down.vx v9, v8, a1
-; RV32-NEXT:    vslide1down.vx v9, v9, a0
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v9, 1
+; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; RV32-NEXT:    viota.m v9, v0
+; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v2i64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB13_3
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    bnez a1, .LBB13_4
-; RV64-NEXT:  .LBB13_2: # %else2
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB13_3: # %cond.load
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vmv.s.x v8, a2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, 2
-; RV64-NEXT:    beqz a1, .LBB13_2
-; RV64-NEXT:  .LBB13_4: # %cond.load1
-; RV64-NEXT:    ld a0, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vmv.s.x v9, a0
 ; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    vslideup.vi v8, v9, 1
+; RV64-NEXT:    viota.m v9, v0
+; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
 ; RV64-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %src0)
   ret <2 x i64>%res
@@ -693,108 +222,20 @@ declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
 define <4 x i64> @expandload_v4i64(ptr %base, <4 x i64> %src0, <4 x i1> %mask) {
 ; RV32-LABEL: expandload_v4i64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB14_5
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB14_6
-; RV32-NEXT:  .LBB14_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB14_7
-; RV32-NEXT:  .LBB14_3: # %else6
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    bnez a1, .LBB14_8
-; RV32-NEXT:  .LBB14_4: # %else10
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB14_5: # %cond.load
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV32-NEXT:    vslide1down.vx v8, v8, a2
-; RV32-NEXT:    vslide1down.vx v8, v8, a3
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB14_2
-; RV32-NEXT:  .LBB14_6: # %cond.load1
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, ta, ma
-; RV32-NEXT:    vslide1down.vx v10, v8, a2
-; RV32-NEXT:    vslide1down.vx v10, v10, a3
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v10, 1
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB14_3
-; RV32-NEXT:  .LBB14_7: # %cond.load5
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m2, ta, ma
-; RV32-NEXT:    vslide1down.vx v10, v8, a2
-; RV32-NEXT:    vslide1down.vx v10, v10, a3
-; RV32-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v10, 2
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, 8
-; RV32-NEXT:    beqz a1, .LBB14_4
-; RV32-NEXT:  .LBB14_8: # %cond.load9
-; RV32-NEXT:    lw a1, 0(a0)
-; RV32-NEXT:    lw a0, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m2, ta, ma
-; RV32-NEXT:    vslide1down.vx v10, v8, a1
-; RV32-NEXT:    vslide1down.vx v10, v10, a0
-; RV32-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v10, 3
+; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; RV32-NEXT:    viota.m v10, v0
+; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v4i64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB14_5
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB14_6
-; RV64-NEXT:  .LBB14_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB14_7
-; RV64-NEXT:  .LBB14_3: # %else6
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    bnez a1, .LBB14_8
-; RV64-NEXT:  .LBB14_4: # %else10
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB14_5: # %cond.load
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vmv.s.x v8, a2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB14_2
-; RV64-NEXT:  .LBB14_6: # %cond.load1
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vmv.s.x v10, a2
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v10, 1
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB14_3
-; RV64-NEXT:  .LBB14_7: # %cond.load5
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV64-NEXT:    vmv.s.x v10, a2
-; RV64-NEXT:    vslideup.vi v8, v10, 2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, 8
-; RV64-NEXT:    beqz a1, .LBB14_4
-; RV64-NEXT:  .LBB14_8: # %cond.load9
-; RV64-NEXT:    ld a0, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    vmv.s.x v10, a0
-; RV64-NEXT:    vslideup.vi v8, v10, 3
+; RV64-NEXT:    viota.m v10, v0
+; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
 ; RV64-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %src0)
   ret <4 x i64>%res
@@ -804,208 +245,20 @@ declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
 define <8 x i64> @expandload_v8i64(ptr %base, <8 x i64> %src0, <8 x i1> %mask) {
 ; RV32-LABEL: expandload_v8i64:
 ; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v0
-; RV32-NEXT:    andi a2, a1, 1
-; RV32-NEXT:    bnez a2, .LBB15_9
-; RV32-NEXT:  # %bb.1: # %else
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    bnez a2, .LBB15_10
-; RV32-NEXT:  .LBB15_2: # %else2
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    bnez a2, .LBB15_11
-; RV32-NEXT:  .LBB15_3: # %else6
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    bnez a2, .LBB15_12
-; RV32-NEXT:  .LBB15_4: # %else10
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    bnez a2, .LBB15_13
-; RV32-NEXT:  .LBB15_5: # %else14
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    bnez a2, .LBB15_14
-; RV32-NEXT:  .LBB15_6: # %else18
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    bnez a2, .LBB15_15
-; RV32-NEXT:  .LBB15_7: # %else22
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    bnez a1, .LBB15_16
-; RV32-NEXT:  .LBB15_8: # %else26
-; RV32-NEXT:    ret
-; RV32-NEXT:  .LBB15_9: # %cond.load
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, tu, ma
-; RV32-NEXT:    vslide1down.vx v8, v8, a2
-; RV32-NEXT:    vslide1down.vx v8, v8, a3
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 2
-; RV32-NEXT:    beqz a2, .LBB15_2
-; RV32-NEXT:  .LBB15_10: # %cond.load1
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m1, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 1
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 4
-; RV32-NEXT:    beqz a2, .LBB15_3
-; RV32-NEXT:  .LBB15_11: # %cond.load5
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m2, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 2
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 8
-; RV32-NEXT:    beqz a2, .LBB15_4
-; RV32-NEXT:  .LBB15_12: # %cond.load9
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m2, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 4, e64, m2, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 3
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 16
-; RV32-NEXT:    beqz a2, .LBB15_5
-; RV32-NEXT:  .LBB15_13: # %cond.load13
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m4, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 5, e64, m4, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 4
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 32
-; RV32-NEXT:    beqz a2, .LBB15_6
-; RV32-NEXT:  .LBB15_14: # %cond.load17
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m4, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 6, e64, m4, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 5
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a2, a1, 64
-; RV32-NEXT:    beqz a2, .LBB15_7
-; RV32-NEXT:  .LBB15_15: # %cond.load21
-; RV32-NEXT:    lw a2, 0(a0)
-; RV32-NEXT:    lw a3, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m4, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a2
-; RV32-NEXT:    vslide1down.vx v12, v12, a3
-; RV32-NEXT:    vsetivli zero, 7, e64, m4, tu, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 6
-; RV32-NEXT:    addi a0, a0, 8
-; RV32-NEXT:    andi a1, a1, -128
-; RV32-NEXT:    beqz a1, .LBB15_8
-; RV32-NEXT:  .LBB15_16: # %cond.load25
-; RV32-NEXT:    lw a1, 0(a0)
-; RV32-NEXT:    lw a0, 4(a0)
-; RV32-NEXT:    vsetivli zero, 2, e32, m4, ta, ma
-; RV32-NEXT:    vslide1down.vx v12, v8, a1
-; RV32-NEXT:    vslide1down.vx v12, v12, a0
-; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV32-NEXT:    vslideup.vi v8, v12, 7
+; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; RV32-NEXT:    viota.m v12, v0
+; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
 ; RV32-NEXT:    ret
 ;
 ; RV64-LABEL: expandload_v8i64:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v0
-; RV64-NEXT:    andi a2, a1, 1
-; RV64-NEXT:    bnez a2, .LBB15_9
-; RV64-NEXT:  # %bb.1: # %else
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    bnez a2, .LBB15_10
-; RV64-NEXT:  .LBB15_2: # %else2
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    bnez a2, .LBB15_11
-; RV64-NEXT:  .LBB15_3: # %else6
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    bnez a2, .LBB15_12
-; RV64-NEXT:  .LBB15_4: # %else10
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    bnez a2, .LBB15_13
-; RV64-NEXT:  .LBB15_5: # %else14
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    bnez a2, .LBB15_14
-; RV64-NEXT:  .LBB15_6: # %else18
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    bnez a2, .LBB15_15
-; RV64-NEXT:  .LBB15_7: # %else22
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    bnez a1, .LBB15_16
-; RV64-NEXT:  .LBB15_8: # %else26
-; RV64-NEXT:    ret
-; RV64-NEXT:  .LBB15_9: # %cond.load
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, tu, ma
-; RV64-NEXT:    vmv.s.x v8, a2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 2
-; RV64-NEXT:    beqz a2, .LBB15_2
-; RV64-NEXT:  .LBB15_10: # %cond.load1
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, tu, ma
-; RV64-NEXT:    vslideup.vi v8, v12, 1
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 4
-; RV64-NEXT:    beqz a2, .LBB15_3
-; RV64-NEXT:  .LBB15_11: # %cond.load5
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 3, e64, m2, tu, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vslideup.vi v8, v12, 2
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 8
-; RV64-NEXT:    beqz a2, .LBB15_4
-; RV64-NEXT:  .LBB15_12: # %cond.load9
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, tu, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vslideup.vi v8, v12, 3
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 16
-; RV64-NEXT:    beqz a2, .LBB15_5
-; RV64-NEXT:  .LBB15_13: # %cond.load13
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 5, e64, m4, tu, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vslideup.vi v8, v12, 4
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 32
-; RV64-NEXT:    beqz a2, .LBB15_6
-; RV64-NEXT:  .LBB15_14: # %cond.load17
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 6, e64, m4, tu, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vslideup.vi v8, v12, 5
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a2, a1, 64
-; RV64-NEXT:    beqz a2, .LBB15_7
-; RV64-NEXT:  .LBB15_15: # %cond.load21
-; RV64-NEXT:    ld a2, 0(a0)
-; RV64-NEXT:    vsetivli zero, 7, e64, m4, tu, ma
-; RV64-NEXT:    vmv.s.x v12, a2
-; RV64-NEXT:    vslideup.vi v8, v12, 6
-; RV64-NEXT:    addi a0, a0, 8
-; RV64-NEXT:    andi a1, a1, -128
-; RV64-NEXT:    beqz a1, .LBB15_8
-; RV64-NEXT:  .LBB15_16: # %cond.load25
-; RV64-NEXT:    ld a0, 0(a0)
 ; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    vmv.s.x v12, a0
-; RV64-NEXT:    vslideup.vi v8, v12, 7
+; RV64-NEXT:    viota.m v12, v0
+; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
 ; RV64-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %src0)
   ret <8 x i64>%res

>From 9fb8b91e53b40f50447ffd2a6ffc64b6a8196f09 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 15:12:55 +0800
Subject: [PATCH 02/15] Handle index>256 cases (except for LMUL==8 case)

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 14 +++++++++-
 llvm/test/CodeGen/RISCV/rvv/expandload.ll   | 31 +++++++++++++++++++++
 2 files changed, 44 insertions(+), 1 deletion(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 31aecc205a76f1..7729e669935994 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -46,6 +46,7 @@
 #include "llvm/Support/KnownBits.h"
 #include "llvm/Support/MathExtras.h"
 #include "llvm/Support/raw_ostream.h"
+#include "llvm/TargetParser/RISCVTargetParser.h"
 #include <optional>
 
 using namespace llvm;
@@ -11142,9 +11143,20 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
     if (ContainerVT.isFloatingPoint())
       IndexVT = IndexVT.changeVectorElementTypeToInteger();
 
-    if (Subtarget.isRV32() && IndexVT.getVectorElementType().bitsGT(XLenVT))
+    MVT IndexEltVT = IndexVT.getVectorElementType();
+    if (Subtarget.isRV32() && IndexEltVT.bitsGT(XLenVT))
       IndexVT = IndexVT.changeVectorElementType(XLenVT);
 
+    // If index vector is an i8 vector and the element count exceeds 256, we
+    // should change the element type of index vector to i16 to avoid overflow.
+    if (IndexEltVT == MVT::i8 &&
+        VT.getVectorElementCount().getKnownMinValue() > 256) {
+      // FIXME: Don't know how to make LMUL==8 case legal.
+      assert(getLMUL(IndexVT) != RISCVII::LMUL_8 &&
+             "We don't know how to lower LMUL==8 case");
+      IndexVT = IndexVT.changeVectorElementType(MVT::i16);
+    }
+
     Index = DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
                         DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
                         DAG.getUNDEF(IndexVT), Mask, VL);
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index ae51beb0272853..83118a2f5c2e8d 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1547,3 +1547,34 @@ declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
 declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
 declare <16 x i64> @llvm.masked.expandload.v16i64(ptr, <16 x i1>, <16 x i64>)
 declare <32 x i64> @llvm.masked.expandload.v32i64(ptr, <32 x i1>, <32 x i64>)
+
+define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) "target-features"="+zvl1024b" {
+; RV64-LABEL: test_expandload_v512i8:
+; RV64:       # %bb.0:
+; RV64-NEXT:    li a1, 512
+; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV64-NEXT:    viota.m v16, v0
+; RV64-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; RV64-NEXT:    ret
+;
+; RV32-LABEL: test_expandload_v512i8:
+; RV32:       # %bb.0:
+; RV32-NEXT:    li a1, 512
+; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; RV32-NEXT:    viota.m v16, v0
+; RV32-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; RV32-NEXT:    ret
+  %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
+  ret <512 x i8> %res
+}
+
+; FIXME: Don't know how to make it legal.
+; define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
+;   %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
+;   ret <1024 x i8> %res
+; }
+
+declare <512 x i8> @llvm.masked.expandload.v512i8(ptr, <512 x i1>, <512 x i8>)
+declare <1024 x i8> @llvm.masked.expandload.v1024i8(ptr, <1024 x i1>, <1024 x i8>)

>From 95fea93ca3286bcbc84b5d80b5ed199a9a748b36 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 15:31:41 +0800
Subject: [PATCH 03/15] Use getTargetConstant

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 7729e669935994..33b6cec3e17e76 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11157,9 +11157,10 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
     }
 
-    Index = DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
-                        DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
-                        DAG.getUNDEF(IndexVT), Mask, VL);
+    Index =
+        DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
+                    DAG.getTargetConstant(Intrinsic::riscv_viota, DL, XLenVT),
+                    DAG.getUNDEF(IndexVT), Mask, VL);
     if (uint64_t EltSize = ContainerVT.getScalarSizeInBits(); EltSize > 8)
       Index = DAG.getNode(RISCVISD::SHL_VL, DL, IndexVT, Index,
                           DAG.getConstant(Log2_64(EltSize / 8), DL, IndexVT),

>From f6068d39997b7755736797c32ff0bf49f525f071 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 18:52:52 +0800
Subject: [PATCH 04/15] Rework tests

---
 llvm/test/CodeGen/RISCV/rvv/expandload.ll | 2072 +++++++++------------
 1 file changed, 858 insertions(+), 1214 deletions(-)

diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index 83118a2f5c2e8d..eb862510a5bdfd 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1,445 +1,335 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
-; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefix=RV64
-; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefix=RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefixes=CHECK,CHECK-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefixes=CHECK,CHECK-RV64
 
 ; Load + expand for i8 type
 
 define <1 x i8> @test_expandload_v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v1i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> %mask, <1 x i8> %passthru)
   ret <1 x i8> %res
 }
 
 define <1 x i8> @test_expandload_v1i8_all_ones(ptr %base, <1 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v1i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> splat (i1 true), <1 x i8> %passthru)
   ret <1 x i8> %res
 }
 
 define <2 x i8> @test_expandload_v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v2i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> %mask, <2 x i8> %passthru)
   ret <2 x i8> %res
 }
 
 define <2 x i8> @test_expandload_v2i8_all_ones(ptr %base, <2 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v2i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> splat (i1 true), <2 x i8> %passthru)
   ret <2 x i8> %res
 }
 
 define <4 x i8> @test_expandload_v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v4i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> %mask, <4 x i8> %passthru)
   ret <4 x i8> %res
 }
 
 define <4 x i8> @test_expandload_v4i8_all_ones(ptr %base, <4 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v4i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> splat (i1 true), <4 x i8> %passthru)
   ret <4 x i8> %res
 }
 
 define <8 x i8> @test_expandload_v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v8i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> %mask, <8 x i8> %passthru)
   ret <8 x i8> %res
 }
 
 define <8 x i8> @test_expandload_v8i8_all_ones(ptr %base, <8 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v8i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> splat (i1 true), <8 x i8> %passthru)
   ret <8 x i8> %res
 }
 
 define <16 x i8> @test_expandload_v16i8(ptr %base, <16 x i1> %mask, <16 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v16i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> %mask, <16 x i8> %passthru)
   ret <16 x i8> %res
 }
 
 define <16 x i8> @test_expandload_v16i8_all_ones(ptr %base, <16 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v16i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> splat (i1 true), <16 x i8> %passthru)
   ret <16 x i8> %res
 }
 
 define <32 x i8> @test_expandload_v32i8(ptr %base, <32 x i1> %mask, <32 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v32i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> %mask, <32 x i8> %passthru)
   ret <32 x i8> %res
 }
 
 define <32 x i8> @test_expandload_v32i8_all_ones(ptr %base, <32 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v32i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> splat (i1 true), <32 x i8> %passthru)
   ret <32 x i8> %res
 }
 
 define <64 x i8> @test_expandload_v64i8(ptr %base, <64 x i1> %mask, <64 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v64i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v64i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> %mask, <64 x i8> %passthru)
   ret <64 x i8> %res
 }
 
 define <64 x i8> @test_expandload_v64i8_all_ones(ptr %base, <64 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v64i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v64i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> splat (i1 true), <64 x i8> %passthru)
   ret <64 x i8> %res
 }
 
 define <128 x i8> @test_expandload_v128i8(ptr %base, <128 x i1> %mask, <128 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v128i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 128
-; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vluxei8.v v8, (a0), v16, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v128i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 128
-; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vluxei8.v v8, (a0), v16, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v128i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 128
+; CHECK-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vluxei8.v v8, (a0), v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> %mask, <128 x i8> %passthru)
   ret <128 x i8> %res
 }
 
 define <128 x i8> @test_expandload_v128i8_all_ones(ptr %base, <128 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v128i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 128
-; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v128i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 128
-; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v128i8_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 128
+; CHECK-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-NEXT:    vle8.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> splat (i1 true), <128 x i8> %passthru)
   ret <128 x i8> %res
 }
 
 define <256 x i8> @test_expandload_v256i8(ptr %base, <256 x i1> %mask, <256 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v256i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -16
-; RV64-NEXT:    .cfi_def_cfa_offset 16
-; RV64-NEXT:    csrr a2, vlenb
-; RV64-NEXT:    slli a2, a2, 4
-; RV64-NEXT:    sub sp, sp, a2
-; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV64-NEXT:    addi a2, sp, 16
-; RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; RV64-NEXT:    vmv1r.v v9, v0
-; RV64-NEXT:    li a2, 128
-; RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; RV64-NEXT:    vle8.v v16, (a1)
-; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    vslidedown.vi v10, v0, 1
-; RV64-NEXT:    vmv.x.s a1, v10
-; RV64-NEXT:    vmv.x.s a3, v0
-; RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; RV64-NEXT:    viota.m v24, v8
-; RV64-NEXT:    csrr a2, vlenb
-; RV64-NEXT:    slli a2, a2, 3
-; RV64-NEXT:    add a2, sp, a2
-; RV64-NEXT:    addi a2, a2, 16
-; RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; RV64-NEXT:    cpop a2, a3
-; RV64-NEXT:    cpop a1, a1
-; RV64-NEXT:    add a2, a0, a2
-; RV64-NEXT:    add a1, a2, a1
-; RV64-NEXT:    vmv1r.v v0, v8
-; RV64-NEXT:    csrr a2, vlenb
-; RV64-NEXT:    slli a2, a2, 3
-; RV64-NEXT:    add a2, sp, a2
-; RV64-NEXT:    addi a2, a2, 16
-; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; RV64-NEXT:    viota.m v24, v9
-; RV64-NEXT:    vmv1r.v v0, v9
-; RV64-NEXT:    addi a1, sp, 16
-; RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; RV64-NEXT:    csrr a0, vlenb
-; RV64-NEXT:    slli a0, a0, 4
-; RV64-NEXT:    add sp, sp, a0
-; RV64-NEXT:    addi sp, sp, 16
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v256i8:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 4
+; CHECK-RV32-NEXT:    sub sp, sp, a2
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV32-NEXT:    addi a2, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v9, v0
+; CHECK-RV32-NEXT:    li a2, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v16, (a1)
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v10, v0, 1
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsrl.vx v11, v10, a1
+; CHECK-RV32-NEXT:    vmv.x.s a3, v11
+; CHECK-RV32-NEXT:    vsrl.vx v11, v0, a1
+; CHECK-RV32-NEXT:    vmv.x.s a1, v11
+; CHECK-RV32-NEXT:    vmv.x.s a4, v10
+; CHECK-RV32-NEXT:    vmv.x.s a5, v0
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    cpop a5, a5
+; CHECK-RV32-NEXT:    add a1, a5, a1
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a3, a4, a3
+; CHECK-RV32-NEXT:    add a1, a1, a3
+; CHECK-RV32-NEXT:    add a1, a0, a1
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-RV32-NEXT:    viota.m v24, v8
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 3
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v0, v8
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 3
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; CHECK-RV32-NEXT:    viota.m v24, v9
+; CHECK-RV32-NEXT:    vmv1r.v v0, v9
+; CHECK-RV32-NEXT:    addi a1, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v256i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    addi sp, sp, -16
-; RV32-NEXT:    .cfi_def_cfa_offset 16
-; RV32-NEXT:    csrr a2, vlenb
-; RV32-NEXT:    slli a2, a2, 4
-; RV32-NEXT:    sub sp, sp, a2
-; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV32-NEXT:    addi a2, sp, 16
-; RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; RV32-NEXT:    vmv1r.v v9, v0
-; RV32-NEXT:    li a2, 128
-; RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; RV32-NEXT:    vle8.v v16, (a1)
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vslidedown.vi v10, v0, 1
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsrl.vx v11, v10, a1
-; RV32-NEXT:    vmv.x.s a3, v11
-; RV32-NEXT:    vsrl.vx v11, v0, a1
-; RV32-NEXT:    vmv.x.s a1, v11
-; RV32-NEXT:    vmv.x.s a4, v10
-; RV32-NEXT:    vmv.x.s a5, v0
-; RV32-NEXT:    cpop a1, a1
-; RV32-NEXT:    cpop a5, a5
-; RV32-NEXT:    add a1, a5, a1
-; RV32-NEXT:    cpop a3, a3
-; RV32-NEXT:    cpop a4, a4
-; RV32-NEXT:    add a3, a4, a3
-; RV32-NEXT:    add a1, a1, a3
-; RV32-NEXT:    add a1, a0, a1
-; RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; RV32-NEXT:    viota.m v24, v8
-; RV32-NEXT:    csrr a2, vlenb
-; RV32-NEXT:    slli a2, a2, 3
-; RV32-NEXT:    add a2, sp, a2
-; RV32-NEXT:    addi a2, a2, 16
-; RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; RV32-NEXT:    vmv1r.v v0, v8
-; RV32-NEXT:    csrr a2, vlenb
-; RV32-NEXT:    slli a2, a2, 3
-; RV32-NEXT:    add a2, sp, a2
-; RV32-NEXT:    addi a2, a2, 16
-; RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; RV32-NEXT:    viota.m v24, v9
-; RV32-NEXT:    vmv1r.v v0, v9
-; RV32-NEXT:    addi a1, sp, 16
-; RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; RV32-NEXT:    csrr a0, vlenb
-; RV32-NEXT:    slli a0, a0, 4
-; RV32-NEXT:    add sp, sp, a0
-; RV32-NEXT:    addi sp, sp, 16
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v256i8:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 4
+; CHECK-RV64-NEXT:    sub sp, sp, a2
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV64-NEXT:    addi a2, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v9, v0
+; CHECK-RV64-NEXT:    li a2, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v16, (a1)
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v10, v0, 1
+; CHECK-RV64-NEXT:    vmv.x.s a1, v10
+; CHECK-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-RV64-NEXT:    viota.m v24, v8
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    cpop a2, a3
+; CHECK-RV64-NEXT:    cpop a1, a1
+; CHECK-RV64-NEXT:    add a2, a0, a2
+; CHECK-RV64-NEXT:    add a1, a2, a1
+; CHECK-RV64-NEXT:    vmv1r.v v0, v8
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; CHECK-RV64-NEXT:    viota.m v24, v9
+; CHECK-RV64-NEXT:    vmv1r.v v0, v9
+; CHECK-RV64-NEXT:    addi a1, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> %mask, <256 x i8> %passthru)
   ret <256 x i8> %res
 }
 
 define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passthru) {
-; RV64-LABEL: test_expandload_v256i8_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 128
-; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV64-NEXT:    vle8.v v8, (a0)
-; RV64-NEXT:    vmset.m v16
-; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    cpop a2, a2
-; RV64-NEXT:    vslidedown.vi v16, v16, 1
-; RV64-NEXT:    vmv.x.s a3, v16
-; RV64-NEXT:    cpop a3, a3
-; RV64-NEXT:    add a0, a0, a2
-; RV64-NEXT:    add a0, a0, a3
-; RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV64-NEXT:    vle8.v v16, (a0)
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vmset.m v8
+; CHECK-RV32-NEXT:    li a2, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-RV32-NEXT:    vmv.x.s a3, v9
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a3, a4, a3
+; CHECK-RV32-NEXT:    vslidedown.vi v8, v8, 1
+; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-RV32-NEXT:    vmv.x.s a2, v9
+; CHECK-RV32-NEXT:    cpop a2, a2
+; CHECK-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a2, a4, a2
+; CHECK-RV32-NEXT:    add a3, a0, a3
+; CHECK-RV32-NEXT:    add a2, a3, a2
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v16, (a2)
+; CHECK-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v256i8_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 128
-; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV32-NEXT:    vmset.m v8
-; RV32-NEXT:    li a2, 32
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vsrl.vx v9, v8, a2
-; RV32-NEXT:    vmv.x.s a3, v9
-; RV32-NEXT:    cpop a3, a3
-; RV32-NEXT:    vmv.x.s a4, v8
-; RV32-NEXT:    cpop a4, a4
-; RV32-NEXT:    add a3, a4, a3
-; RV32-NEXT:    vslidedown.vi v8, v8, 1
-; RV32-NEXT:    vsrl.vx v9, v8, a2
-; RV32-NEXT:    vmv.x.s a2, v9
-; RV32-NEXT:    cpop a2, a2
-; RV32-NEXT:    vmv.x.s a4, v8
-; RV32-NEXT:    cpop a4, a4
-; RV32-NEXT:    add a2, a4, a2
-; RV32-NEXT:    add a3, a0, a3
-; RV32-NEXT:    add a2, a3, a2
-; RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; RV32-NEXT:    vle8.v v16, (a2)
-; RV32-NEXT:    vle8.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-RV64-NEXT:    vmset.m v16
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    cpop a2, a2
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v16, 1
+; CHECK-RV64-NEXT:    vmv.x.s a3, v16
+; CHECK-RV64-NEXT:    cpop a3, a3
+; CHECK-RV64-NEXT:    add a0, a0, a2
+; CHECK-RV64-NEXT:    add a0, a0, a3
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> splat (i1 true), <256 x i8> %passthru)
   ret <256 x i8> %res
 }
@@ -454,426 +344,317 @@ declare <64 x i8> @llvm.masked.expandload.v64i8(ptr, <64 x i1>, <64 x i8>)
 declare <128 x i8> @llvm.masked.expandload.v128i8(ptr, <128 x i1>, <128 x i8>)
 declare <256 x i8> @llvm.masked.expandload.v256i8(ptr, <256 x i1>, <256 x i8>)
 
-; Compress + store for i16 type
+; Load + expand for i16 type
 
 define <1 x i16> @test_expandload_v1i16(ptr %base, <1 x i1> %mask, <1 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v1i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %passthru)
   ret <1 x i16> %res
 }
 
 define <1 x i16> @test_expandload_v1i16_all_ones(ptr %base, <1 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v1i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> splat (i1 true), <1 x i16> %passthru)
   ret <1 x i16> %res
 }
 
 define <2 x i16> @test_expandload_v2i16(ptr %base, <2 x i1> %mask, <2 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v2i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %passthru)
   ret <2 x i16> %res
 }
 
 define <2 x i16> @test_expandload_v2i16_all_ones(ptr %base, <2 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v2i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> splat (i1 true), <2 x i16> %passthru)
   ret <2 x i16> %res
 }
 
 define <4 x i16> @test_expandload_v4i16(ptr %base, <4 x i1> %mask, <4 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v4i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %passthru)
   ret <4 x i16> %res
 }
 
 define <4 x i16> @test_expandload_v4i16_all_ones(ptr %base, <4 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v4i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> splat (i1 true), <4 x i16> %passthru)
   ret <4 x i16> %res
 }
 
 define <8 x i16> @test_expandload_v8i16(ptr %base, <8 x i1> %mask, <8 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v8i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %passthru)
   ret <8 x i16> %res
 }
 
 define <8 x i16> @test_expandload_v8i16_all_ones(ptr %base, <8 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v8i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> splat (i1 true), <8 x i16> %passthru)
   ret <8 x i16> %res
 }
 
 define <16 x i16> @test_expandload_v16i16(ptr %base, <16 x i1> %mask, <16 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v16i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vsll.vi v10, v10, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> %mask, <16 x i16> %passthru)
   ret <16 x i16> %res
 }
 
 define <16 x i16> @test_expandload_v16i16_all_ones(ptr %base, <16 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v16i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> splat (i1 true), <16 x i16> %passthru)
   ret <16 x i16> %res
 }
 
 define <32 x i16> @test_expandload_v32i16(ptr %base, <32 x i1> %mask, <32 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v32i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vsll.vi v12, v12, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vsll.vi v12, v12, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vsll.vi v12, v12, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> %mask, <32 x i16> %passthru)
   ret <32 x i16> %res
 }
 
 define <32 x i16> @test_expandload_v32i16_all_ones(ptr %base, <32 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v32i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> splat (i1 true), <32 x i16> %passthru)
   ret <32 x i16> %res
 }
 
 define <64 x i16> @test_expandload_v64i16(ptr %base, <64 x i1> %mask, <64 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v64i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v64i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vsll.vi v16, v16, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vsll.vi v16, v16, 1, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> %mask, <64 x i16> %passthru)
   ret <64 x i16> %res
 }
 
 define <64 x i16> @test_expandload_v64i16_all_ones(ptr %base, <64 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v64i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v64i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i16_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-NEXT:    vle16.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> splat (i1 true), <64 x i16> %passthru)
   ret <64 x i16> %res
 }
 
 define <128 x i16> @test_expandload_v128i16(ptr %base, <128 x i1> %mask, <128 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v128i16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -16
-; RV64-NEXT:    .cfi_def_cfa_offset 16
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 4
-; RV64-NEXT:    sub sp, sp, a1
-; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 3
-; RV64-NEXT:    add a1, sp, a1
-; RV64-NEXT:    addi a1, a1, 16
-; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV64-NEXT:    vmv1r.v v7, v0
-; RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; RV64-NEXT:    vslidedown.vi v0, v0, 8
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
-; RV64-NEXT:    addi a2, sp, 16
-; RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a2, v7
-; RV64-NEXT:    cpop a2, a2
-; RV64-NEXT:    slli a2, a2, 1
-; RV64-NEXT:    add a2, a0, a2
-; RV64-NEXT:    csrr a3, vlenb
-; RV64-NEXT:    slli a3, a3, 3
-; RV64-NEXT:    add a3, sp, a3
-; RV64-NEXT:    addi a3, a3, 16
-; RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
-; RV64-NEXT:    addi a3, sp, 16
-; RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
-; RV64-NEXT:    viota.m v24, v7
-; RV64-NEXT:    vmv1r.v v0, v7
-; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; RV64-NEXT:    csrr a0, vlenb
-; RV64-NEXT:    slli a0, a0, 4
-; RV64-NEXT:    add sp, sp, a0
-; RV64-NEXT:    addi sp, sp, 16
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v128i16:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 4
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v24, v0
+; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 8
+; CHECK-RV32-NEXT:    li a1, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    viota.m v8, v0
+; CHECK-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; CHECK-RV32-NEXT:    addi a2, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    li a2, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v8, v24, a2
+; CHECK-RV32-NEXT:    vmv.x.s a2, v8
+; CHECK-RV32-NEXT:    cpop a2, a2
+; CHECK-RV32-NEXT:    vmv.x.s a3, v24
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    add a2, a3, a2
+; CHECK-RV32-NEXT:    slli a2, a2, 1
+; CHECK-RV32-NEXT:    add a2, a0, a2
+; CHECK-RV32-NEXT:    addi a3, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
+; CHECK-RV32-NEXT:    viota.m v8, v24
+; CHECK-RV32-NEXT:    vmv1r.v v0, v24
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; CHECK-RV32-NEXT:    addi a1, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a1, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v128i16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    addi sp, sp, -16
-; RV32-NEXT:    .cfi_def_cfa_offset 16
-; RV32-NEXT:    csrr a1, vlenb
-; RV32-NEXT:    slli a1, a1, 4
-; RV32-NEXT:    sub sp, sp, a1
-; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV32-NEXT:    csrr a1, vlenb
-; RV32-NEXT:    slli a1, a1, 3
-; RV32-NEXT:    add a1, sp, a1
-; RV32-NEXT:    addi a1, a1, 16
-; RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; RV32-NEXT:    vmv1r.v v24, v0
-; RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; RV32-NEXT:    vslidedown.vi v0, v0, 8
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    viota.m v8, v0
-; RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; RV32-NEXT:    addi a2, sp, 16
-; RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
-; RV32-NEXT:    li a2, 32
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vsrl.vx v8, v24, a2
-; RV32-NEXT:    vmv.x.s a2, v8
-; RV32-NEXT:    cpop a2, a2
-; RV32-NEXT:    vmv.x.s a3, v24
-; RV32-NEXT:    cpop a3, a3
-; RV32-NEXT:    add a2, a3, a2
-; RV32-NEXT:    slli a2, a2, 1
-; RV32-NEXT:    add a2, a0, a2
-; RV32-NEXT:    addi a3, sp, 16
-; RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
-; RV32-NEXT:    viota.m v8, v24
-; RV32-NEXT:    vmv1r.v v0, v24
-; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; RV32-NEXT:    addi a1, sp, 16
-; RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; RV32-NEXT:    csrr a1, vlenb
-; RV32-NEXT:    slli a1, a1, 3
-; RV32-NEXT:    add a1, sp, a1
-; RV32-NEXT:    addi a1, a1, 16
-; RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; RV32-NEXT:    addi a1, sp, 16
-; RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
-; RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; RV32-NEXT:    csrr a0, vlenb
-; RV32-NEXT:    slli a0, a0, 4
-; RV32-NEXT:    add sp, sp, a0
-; RV32-NEXT:    addi sp, sp, 16
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v128i16:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 4
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 3
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 8
+; CHECK-RV64-NEXT:    li a1, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
+; CHECK-RV64-NEXT:    addi a2, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v7
+; CHECK-RV64-NEXT:    cpop a2, a2
+; CHECK-RV64-NEXT:    slli a2, a2, 1
+; CHECK-RV64-NEXT:    add a2, a0, a2
+; CHECK-RV64-NEXT:    csrr a3, vlenb
+; CHECK-RV64-NEXT:    slli a3, a3, 3
+; CHECK-RV64-NEXT:    add a3, sp, a3
+; CHECK-RV64-NEXT:    addi a3, a3, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a3, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
+; CHECK-RV64-NEXT:    viota.m v24, v7
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> %mask, <128 x i16> %passthru)
   ret <128 x i16> %res
 }
 
 define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %passthru) {
-; RV64-LABEL: test_expandload_v128i16_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 64
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    vle16.v v8, (a0)
-; RV64-NEXT:    vmset.m v16
-; RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; RV64-NEXT:    vmv.x.s a2, v16
-; RV64-NEXT:    cpop a2, a2
-; RV64-NEXT:    slli a2, a2, 1
-; RV64-NEXT:    add a0, a0, a2
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    vle16.v v16, (a0)
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-RV32-NEXT:    vmset.m v16
+; CHECK-RV32-NEXT:    li a2, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v17, v16, a2
+; CHECK-RV32-NEXT:    vmv.x.s a2, v17
+; CHECK-RV32-NEXT:    cpop a2, a2
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    add a2, a3, a2
+; CHECK-RV32-NEXT:    slli a2, a2, 1
+; CHECK-RV32-NEXT:    add a0, a0, a2
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v16, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v128i16_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 64
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    vle16.v v8, (a0)
-; RV32-NEXT:    vmset.m v16
-; RV32-NEXT:    li a2, 32
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vsrl.vx v17, v16, a2
-; RV32-NEXT:    vmv.x.s a2, v17
-; RV32-NEXT:    cpop a2, a2
-; RV32-NEXT:    vmv.x.s a3, v16
-; RV32-NEXT:    cpop a3, a3
-; RV32-NEXT:    add a2, a3, a2
-; RV32-NEXT:    slli a2, a2, 1
-; RV32-NEXT:    add a0, a0, a2
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    vle16.v v16, (a0)
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v8, (a0)
+; CHECK-RV64-NEXT:    vmset.m v16
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    cpop a2, a2
+; CHECK-RV64-NEXT:    slli a2, a2, 1
+; CHECK-RV64-NEXT:    add a0, a0, a2
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> splat (i1 true), <128 x i16> %passthru)
   ret <128 x i16> %res
 }
@@ -887,360 +668,268 @@ declare <32 x i16> @llvm.masked.expandload.v32i16(ptr, <32 x i1>, <32 x i16>)
 declare <64 x i16> @llvm.masked.expandload.v64i16(ptr, <64 x i1>, <64 x i16>)
 declare <128 x i16> @llvm.masked.expandload.v128i16(ptr, <128 x i1>, <128 x i16>)
 
-; Compress + store for i32 type
+; Load + expand for i32 type
 
 define <1 x i32> @test_expandload_v1i32(ptr %base, <1 x i1> %mask, <1 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v1i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %passthru)
   ret <1 x i32> %res
 }
 
 define <1 x i32> @test_expandload_v1i32_all_ones(ptr %base, <1 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v1i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> splat (i1 true), <1 x i32> %passthru)
   ret <1 x i32> %res
 }
 
 define <2 x i32> @test_expandload_v2i32(ptr %base, <2 x i1> %mask, <2 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v2i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %passthru)
   ret <2 x i32> %res
 }
 
 define <2 x i32> @test_expandload_v2i32_all_ones(ptr %base, <2 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v2i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> splat (i1 true), <2 x i32> %passthru)
   ret <2 x i32> %res
 }
 
 define <4 x i32> @test_expandload_v4i32(ptr %base, <4 x i1> %mask, <4 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v4i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-NEXT:    viota.m v9, v0
+; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %passthru)
   ret <4 x i32> %res
 }
 
 define <4 x i32> @test_expandload_v4i32_all_ones(ptr %base, <4 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v4i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> splat (i1 true), <4 x i32> %passthru)
   ret <4 x i32> %res
 }
 
 define <8 x i32> @test_expandload_v8i32(ptr %base, <8 x i1> %mask, <8 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v8i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vsll.vi v10, v10, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %passthru)
   ret <8 x i32> %res
 }
 
 define <8 x i32> @test_expandload_v8i32_all_ones(ptr %base, <8 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v8i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> splat (i1 true), <8 x i32> %passthru)
   ret <8 x i32> %res
 }
 
 define <16 x i32> @test_expandload_v16i32(ptr %base, <16 x i1> %mask, <16 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v16i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vsll.vi v12, v12, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vsll.vi v12, v12, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vsll.vi v12, v12, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> %mask, <16 x i32> %passthru)
   ret <16 x i32> %res
 }
 
 define <16 x i32> @test_expandload_v16i32_all_ones(ptr %base, <16 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v16i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> splat (i1 true), <16 x i32> %passthru)
   ret <16 x i32> %res
 }
 
 define <32 x i32> @test_expandload_v32i32(ptr %base, <32 x i1> %mask, <32 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v32i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> %mask, <32 x i32> %passthru)
   ret <32 x i32> %res
 }
 
 define <32 x i32> @test_expandload_v32i32_all_ones(ptr %base, <32 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v32i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i32_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-NEXT:    vle32.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> splat (i1 true), <32 x i32> %passthru)
   ret <32 x i32> %res
 }
 
 define <64 x i32> @test_expandload_v64i32(ptr %base, <64 x i1> %mask, <64 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v64i32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -16
-; RV64-NEXT:    .cfi_def_cfa_offset 16
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 4
-; RV64-NEXT:    sub sp, sp, a1
-; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 3
-; RV64-NEXT:    add a1, sp, a1
-; RV64-NEXT:    addi a1, a1, 16
-; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV64-NEXT:    vmv1r.v v7, v0
-; RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; RV64-NEXT:    vslidedown.vi v0, v0, 4
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
-; RV64-NEXT:    addi a1, sp, 16
-; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV64-NEXT:    vmv.x.s a1, v7
-; RV64-NEXT:    cpopw a1, a1
-; RV64-NEXT:    slli a1, a1, 2
-; RV64-NEXT:    add a1, a0, a1
-; RV64-NEXT:    csrr a2, vlenb
-; RV64-NEXT:    slli a2, a2, 3
-; RV64-NEXT:    add a2, sp, a2
-; RV64-NEXT:    addi a2, a2, 16
-; RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; RV64-NEXT:    addi a2, sp, 16
-; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; RV64-NEXT:    viota.m v24, v7
-; RV64-NEXT:    vmv1r.v v0, v7
-; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; RV64-NEXT:    csrr a0, vlenb
-; RV64-NEXT:    slli a0, a0, 4
-; RV64-NEXT:    add sp, sp, a0
-; RV64-NEXT:    addi sp, sp, 16
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v64i32:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 4
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v7, v0
+; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 4
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-RV32-NEXT:    addi a1, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv.x.s a1, v7
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    slli a1, a1, 2
+; CHECK-RV32-NEXT:    add a1, a0, a1
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 3
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a2, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; CHECK-RV32-NEXT:    viota.m v24, v7
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v64i32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    addi sp, sp, -16
-; RV32-NEXT:    .cfi_def_cfa_offset 16
-; RV32-NEXT:    csrr a1, vlenb
-; RV32-NEXT:    slli a1, a1, 4
-; RV32-NEXT:    sub sp, sp, a1
-; RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV32-NEXT:    csrr a1, vlenb
-; RV32-NEXT:    slli a1, a1, 3
-; RV32-NEXT:    add a1, sp, a1
-; RV32-NEXT:    addi a1, a1, 16
-; RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV32-NEXT:    vmv1r.v v7, v0
-; RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; RV32-NEXT:    vslidedown.vi v0, v0, 4
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
-; RV32-NEXT:    addi a1, sp, 16
-; RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV32-NEXT:    vmv.x.s a1, v7
-; RV32-NEXT:    cpop a1, a1
-; RV32-NEXT:    slli a1, a1, 2
-; RV32-NEXT:    add a1, a0, a1
-; RV32-NEXT:    csrr a2, vlenb
-; RV32-NEXT:    slli a2, a2, 3
-; RV32-NEXT:    add a2, sp, a2
-; RV32-NEXT:    addi a2, a2, 16
-; RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; RV32-NEXT:    addi a2, sp, 16
-; RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; RV32-NEXT:    viota.m v24, v7
-; RV32-NEXT:    vmv1r.v v0, v7
-; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; RV32-NEXT:    csrr a0, vlenb
-; RV32-NEXT:    slli a0, a0, 4
-; RV32-NEXT:    add sp, sp, a0
-; RV32-NEXT:    addi sp, sp, 16
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v64i32:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 4
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 3
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 4
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-RV64-NEXT:    addi a1, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv.x.s a1, v7
+; CHECK-RV64-NEXT:    cpopw a1, a1
+; CHECK-RV64-NEXT:    slli a1, a1, 2
+; CHECK-RV64-NEXT:    add a1, a0, a1
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a2, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; CHECK-RV64-NEXT:    viota.m v24, v7
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> %mask, <64 x i32> %passthru)
   ret <64 x i32> %res
 }
 
 define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passthru) {
-; RV64-LABEL: test_expandload_v64i32_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 32
-; RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV64-NEXT:    vle32.v v8, (a0)
-; RV64-NEXT:    addi a0, a0, 128
-; RV64-NEXT:    vle32.v v16, (a0)
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vle32.v v8, (a0)
+; CHECK-RV32-NEXT:    vmset.m v16
+; CHECK-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    slli a1, a1, 2
+; CHECK-RV32-NEXT:    add a0, a0, a1
+; CHECK-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v64i32_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 32
-; RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; RV32-NEXT:    vle32.v v8, (a0)
-; RV32-NEXT:    vmset.m v16
-; RV32-NEXT:    vmv.x.s a1, v16
-; RV32-NEXT:    cpop a1, a1
-; RV32-NEXT:    slli a1, a1, 2
-; RV32-NEXT:    add a0, a0, a1
-; RV32-NEXT:    vle32.v v16, (a0)
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vle32.v v8, (a0)
+; CHECK-RV64-NEXT:    addi a0, a0, 128
+; CHECK-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> splat (i1 true), <64 x i32> %passthru)
   ret <64 x i32> %res
 }
@@ -1253,290 +942,252 @@ declare <16 x i32> @llvm.masked.expandload.v16i32(ptr, <16 x i1>, <16 x i32>)
 declare <32 x i32> @llvm.masked.expandload.v32i32(ptr, <32 x i1>, <32 x i32>)
 declare <64 x i32> @llvm.masked.expandload.v64i32(ptr, <64 x i1>, <64 x i32>)
 
-; Compress + store for i64 type
+; Load + expand for i64 type
 
 define <1 x i64> @test_expandload_v1i64(ptr %base, <1 x i1> %mask, <1 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v1i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v1i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-RV32-NEXT:    viota.m v9, v0
+; CHECK-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v1i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v1i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    viota.m v9, v0
+; CHECK-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-RV64-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %passthru)
   ret <1 x i64> %res
 }
 
 define <1 x i64> @test_expandload_v1i64_all_ones(ptr %base, <1 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v1i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v1i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> splat (i1 true), <1 x i64> %passthru)
   ret <1 x i64> %res
 }
 
 define <2 x i64> @test_expandload_v2i64(ptr %base, <2 x i1> %mask, <2 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v2i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v2i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-RV32-NEXT:    viota.m v9, v0
+; CHECK-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v2i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v2i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    viota.m v9, v0
+; CHECK-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-RV64-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %passthru)
   ret <2 x i64> %res
 }
 
 define <2 x i64> @test_expandload_v2i64_all_ones(ptr %base, <2 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v2i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v2i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> splat (i1 true), <2 x i64> %passthru)
   ret <2 x i64> %res
 }
 
 define <4 x i64> @test_expandload_v4i64(ptr %base, <4 x i1> %mask, <4 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v4i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v4i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-RV32-NEXT:    viota.m v10, v0
+; CHECK-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v4i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v4i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; CHECK-RV64-NEXT:    viota.m v10, v0
+; CHECK-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
+; CHECK-RV64-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %passthru)
   ret <4 x i64> %res
 }
 
 define <4 x i64> @test_expandload_v4i64_all_ones(ptr %base, <4 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v4i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v4i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> splat (i1 true), <4 x i64> %passthru)
   ret <4 x i64> %res
 }
 
 define <8 x i64> @test_expandload_v8i64(ptr %base, <8 x i1> %mask, <8 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v8i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v8i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-RV32-NEXT:    viota.m v12, v0
+; CHECK-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v8i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v8i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; CHECK-RV64-NEXT:    viota.m v12, v0
+; CHECK-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
+; CHECK-RV64-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %passthru)
   ret <8 x i64> %res
 }
 
 define <8 x i64> @test_expandload_v8i64_all_ones(ptr %base, <8 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v8i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v8i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> splat (i1 true), <8 x i64> %passthru)
   ret <8 x i64> %res
 }
 
 define <16 x i64> @test_expandload_v16i64(ptr %base, <16 x i1> %mask, <16 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v16i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v16i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v16i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v16i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
+; CHECK-RV64-NEXT:    ret
   %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> %mask, <16 x i64> %passthru)
   ret <16 x i64> %res
 }
 
 define <16 x i64> @test_expandload_v16i64_all_ones(ptr %base, <16 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v16i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v16i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    ret
   %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> splat (i1 true), <16 x i64> %passthru)
   ret <16 x i64> %res
 }
 
 define <32 x i64> @test_expandload_v32i64(ptr %base, <32 x i1> %mask, <32 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v32i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    addi sp, sp, -16
-; RV64-NEXT:    .cfi_def_cfa_offset 16
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 4
-; RV64-NEXT:    sub sp, sp, a1
-; RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; RV64-NEXT:    csrr a1, vlenb
-; RV64-NEXT:    slli a1, a1, 3
-; RV64-NEXT:    add a1, sp, a1
-; RV64-NEXT:    addi a1, a1, 16
-; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV64-NEXT:    vmv1r.v v7, v0
-; RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; RV64-NEXT:    vslidedown.vi v0, v0, 2
-; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; RV64-NEXT:    addi a1, sp, 16
-; RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV64-NEXT:    vmv.x.s a1, v7
-; RV64-NEXT:    zext.h a1, a1
-; RV64-NEXT:    cpopw a1, a1
-; RV64-NEXT:    slli a1, a1, 3
-; RV64-NEXT:    add a1, a0, a1
-; RV64-NEXT:    csrr a2, vlenb
-; RV64-NEXT:    slli a2, a2, 3
-; RV64-NEXT:    add a2, sp, a2
-; RV64-NEXT:    addi a2, a2, 16
-; RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; RV64-NEXT:    addi a2, sp, 16
-; RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
-; RV64-NEXT:    viota.m v24, v7
-; RV64-NEXT:    vmv1r.v v0, v7
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
-; RV64-NEXT:    csrr a0, vlenb
-; RV64-NEXT:    slli a0, a0, 4
-; RV64-NEXT:    add sp, sp, a0
-; RV64-NEXT:    addi sp, sp, 16
-; RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v32i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vmv1r.v v24, v0
+; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 2
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-RV32-NEXT:    viota.m v28, v0
+; CHECK-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a1, v24
+; CHECK-RV32-NEXT:    zext.h a1, a1
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a1, a0, a1
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
+; CHECK-RV32-NEXT:    viota.m v28, v24
+; CHECK-RV32-NEXT:    vmv1r.v v0, v24
+; CHECK-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
+; CHECK-RV32-NEXT:    ret
 ;
-; RV32-LABEL: test_expandload_v32i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vmv1r.v v24, v0
-; RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; RV32-NEXT:    vslidedown.vi v0, v0, 2
-; RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; RV32-NEXT:    viota.m v28, v0
-; RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; RV32-NEXT:    vmv.x.s a1, v24
-; RV32-NEXT:    zext.h a1, a1
-; RV32-NEXT:    cpop a1, a1
-; RV32-NEXT:    slli a1, a1, 3
-; RV32-NEXT:    add a1, a0, a1
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; RV32-NEXT:    viota.m v28, v24
-; RV32-NEXT:    vmv1r.v v0, v24
-; RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
-; RV32-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v32i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 4
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 3
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 2
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-RV64-NEXT:    addi a1, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v7
+; CHECK-RV64-NEXT:    zext.h a1, a1
+; CHECK-RV64-NEXT:    cpopw a1, a1
+; CHECK-RV64-NEXT:    slli a1, a1, 3
+; CHECK-RV64-NEXT:    add a1, a0, a1
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a2, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
+; CHECK-RV64-NEXT:    viota.m v24, v7
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
+; CHECK-RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> %mask, <32 x i64> %passthru)
   ret <32 x i64> %res
 }
 
 define <32 x i64> @test_expandload_v32i64_all_ones(ptr %base, <32 x i64> %passthru) {
-; RV64-LABEL: test_expandload_v32i64_all_ones:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV64-NEXT:    vle64.v v8, (a0)
-; RV64-NEXT:    addi a0, a0, 128
-; RV64-NEXT:    vle64.v v16, (a0)
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v32i64_all_ones:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; RV32-NEXT:    vle64.v v8, (a0)
-; RV32-NEXT:    addi a0, a0, 128
-; RV32-NEXT:    vle64.v v16, (a0)
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i64_all_ones:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-NEXT:    vle64.v v8, (a0)
+; CHECK-NEXT:    addi a0, a0, 128
+; CHECK-NEXT:    vle64.v v16, (a0)
+; CHECK-NEXT:    ret
   %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> splat (i1 true), <32 x i64> %passthru)
   ret <32 x i64> %res
 }
@@ -1548,24 +1199,17 @@ declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
 declare <16 x i64> @llvm.masked.expandload.v16i64(ptr, <16 x i1>, <16 x i64>)
 declare <32 x i64> @llvm.masked.expandload.v32i64(ptr, <32 x i1>, <32 x i64>)
 
+; Tests that will exceed the range of an i8 index.
+
 define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) "target-features"="+zvl1024b" {
-; RV64-LABEL: test_expandload_v512i8:
-; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, 512
-; RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV64-NEXT:    viota.m v16, v0
-; RV64-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; RV64-NEXT:    ret
-;
-; RV32-LABEL: test_expandload_v512i8:
-; RV32:       # %bb.0:
-; RV32-NEXT:    li a1, 512
-; RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; RV32-NEXT:    viota.m v16, v0
-; RV32-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; RV32-NEXT:    ret
+; CHECK-LABEL: test_expandload_v512i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 512
+; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }

>From 746a86a50be2ad8a8cbbc7e26f5888a8e3422b32 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 18:54:13 +0800
Subject: [PATCH 05/15] Split LMUL 8 case manually

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 96 +++++++++++++++++++-
 llvm/lib/Target/RISCV/RISCVISelLowering.h   |  2 +
 llvm/test/CodeGen/RISCV/rvv/expandload.ll   | 97 ++++++++++++++++++++-
 3 files changed, 188 insertions(+), 7 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 33b6cec3e17e76..c129a0d260ed92 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11096,6 +11096,95 @@ RISCVTargetLowering::lowerFixedLengthVectorStoreToRVV(SDValue Op,
       Store->getMemoryVT(), Store->getMemOperand());
 }
 
+SDValue RISCVTargetLowering::splitMaskedExpandingLoad(SDValue Op,
+                                                      SelectionDAG &DAG) const {
+  SDLoc DL(Op);
+  MVT VT = Op.getSimpleValueType();
+  auto *MLD = cast<MaskedLoadSDNode>(Op);
+  MVT XLenVT = Subtarget.getXLenVT();
+  auto [LoVT, HiVT] = DAG.GetSplitDestVTs(MLD->getValueType(0));
+
+  SDValue Chain = MLD->getChain();
+  SDValue Ptr = MLD->getBasePtr();
+  SDValue Offset = MLD->getOffset();
+  SDValue Mask = MLD->getMask();
+  SDValue Passthru = MLD->getPassThru();
+  Align Alignment = MLD->getOriginalAlign();
+  ISD::LoadExtType ExtType = MLD->getExtensionType();
+
+  // Split Mask operand
+  auto [MaskLo, MaskHi] = DAG.SplitVector(Mask, DL);
+
+  EVT MemoryVT = MLD->getMemoryVT();
+  bool HiIsEmpty = false;
+  auto [LoMemVT, HiMemVT] =
+      DAG.GetDependentSplitDestVTs(MemoryVT, LoVT, &HiIsEmpty);
+
+  // Split PassThru operand
+  auto [PassthruLo, PassthruHi] = DAG.SplitVector(Passthru, DL);
+
+  MachineMemOperand *MMO = DAG.getMachineFunction().getMachineMemOperand(
+      MLD->getPointerInfo(), MachineMemOperand::MOLoad,
+      LocationSize::beforeOrAfterPointer(), Alignment, MLD->getAAInfo(),
+      MLD->getRanges());
+
+  SDValue Lo, Hi;
+  Lo = DAG.getMaskedLoad(LoVT, DL, Chain, Ptr, Offset, MaskLo, PassthruLo,
+                         LoMemVT, MMO, MLD->getAddressingMode(), ExtType,
+                         /*IsExpanding=*/true);
+
+  if (HiIsEmpty) {
+    // The hi masked load has zero storage size. We therefore simply set it to
+    // the low masked load and rely on subsequent removal from the chain.
+    Hi = Lo;
+  } else {
+    EVT MaskVT = MaskLo.getValueType();
+    SDValue VL = DAG.getConstant(MaskVT.getVectorNumElements(), DL, XLenVT);
+
+    MVT MaskContainerVT =
+        getContainerForFixedLengthVector(MaskVT.getSimpleVT());
+    MaskLo = convertToScalableVector(MaskContainerVT, MaskLo, DAG, Subtarget);
+
+    SDValue Increment = DAG.getNode(
+        RISCVISD::VCPOP_VL, DL, XLenVT, MaskLo,
+        getAllOnesMask(MaskLo.getSimpleValueType(), VL, DL, DAG), VL);
+
+    // Scale is an element size in bytes.
+    SDValue Scale =
+        DAG.getConstant(LoMemVT.getScalarSizeInBits() / 8, DL, XLenVT);
+    Increment = DAG.getNode(ISD::MUL, DL, XLenVT, Increment, Scale);
+
+    Ptr = DAG.getNode(ISD::ADD, DL, XLenVT, Ptr, Increment);
+
+    MachinePointerInfo MPI;
+    if (LoMemVT.isScalableVector())
+      MPI = MachinePointerInfo(MLD->getPointerInfo().getAddrSpace());
+    else
+      MPI = MLD->getPointerInfo().getWithOffset(
+          LoMemVT.getStoreSize().getFixedValue());
+
+    MMO = DAG.getMachineFunction().getMachineMemOperand(
+        MPI, MachineMemOperand::MOLoad, LocationSize::beforeOrAfterPointer(),
+        Alignment, MLD->getAAInfo(), MLD->getRanges());
+
+    Hi = DAG.getMaskedLoad(HiVT, DL, Chain, Ptr, Offset, MaskHi, PassthruHi,
+                           HiMemVT, MMO, MLD->getAddressingMode(), ExtType,
+                           /*IsExpanding=*/true);
+  }
+
+  // Build a factor node to remember that this load is independent of the
+  // other one.
+  Chain = DAG.getNode(ISD::TokenFactor, DL, MVT::Other, Lo.getValue(1),
+                      Hi.getValue(1));
+
+  // Legalize the chain result - switch anything that used the old chain to
+  // use the new one.
+  DAG.ReplaceAllUsesOfValueWith(SDValue(MLD, 1), Chain);
+
+  return DAG.getMergeValues(
+      {DAG.getNode(ISD::CONCAT_VECTORS, DL, VT, Lo, Hi), Chain}, DL);
+}
+
 SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
                                              SelectionDAG &DAG) const {
   SDLoc DL(Op);
@@ -11151,9 +11240,10 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
     // should change the element type of index vector to i16 to avoid overflow.
     if (IndexEltVT == MVT::i8 &&
         VT.getVectorElementCount().getKnownMinValue() > 256) {
-      // FIXME: Don't know how to make LMUL==8 case legal.
-      assert(getLMUL(IndexVT) != RISCVII::LMUL_8 &&
-             "We don't know how to lower LMUL==8 case");
+      // If this will result in illegal types, we split it into two loads.
+      if (getLMUL(IndexVT) == RISCVII::LMUL_8)
+        return splitMaskedExpandingLoad(Op, DAG);
+
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
     }
 
diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.h b/llvm/lib/Target/RISCV/RISCVISelLowering.h
index c3749447955330..81a1255ee9b0d2 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.h
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.h
@@ -995,6 +995,8 @@ class RISCVTargetLowering : public TargetLowering {
   SDValue lowerINIT_TRAMPOLINE(SDValue Op, SelectionDAG &DAG) const;
   SDValue lowerADJUST_TRAMPOLINE(SDValue Op, SelectionDAG &DAG) const;
 
+  SDValue splitMaskedExpandingLoad(SDValue Op, SelectionDAG &DAG) const;
+
   bool isEligibleForTailCallOptimization(
       CCState &CCInfo, CallLoweringInfo &CLI, MachineFunction &MF,
       const SmallVector<CCValAssign, 16> &ArgLocs) const;
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index eb862510a5bdfd..86e66ea335964b 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1215,10 +1215,99 @@ define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8
 }
 
 ; FIXME: Don't know how to make it legal.
-; define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
-;   %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
-;   ret <1024 x i8> %res
-; }
+define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
+; CHECK-RV32-LABEL: test_expandload_v1024i8:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x08, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 8 * vlenb
+; CHECK-RV32-NEXT:    vmv1r.v v7, v0
+; CHECK-RV32-NEXT:    li a1, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a2, v0
+; CHECK-RV32-NEXT:    add a2, a0, a2
+; CHECK-RV32-NEXT:    li a3, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vx v0, v0, a3
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    addi a3, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a3) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vx v24, v8, a1
+; CHECK-RV32-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-RV32-NEXT:    vluxei16.v v24, (a2), v16, v0.t
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    viota.m v16, v7
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-RV32-NEXT:    li a0, 1024
+; CHECK-RV32-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a1
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
+;
+; CHECK-RV64-LABEL: test_expandload_v1024i8:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 4
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-RV64-NEXT:    li a1, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a2, v0
+; CHECK-RV64-NEXT:    add a2, a0, a2
+; CHECK-RV64-NEXT:    li a3, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vx v0, v0, a3
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v24, v0
+; CHECK-RV64-NEXT:    csrr a3, vlenb
+; CHECK-RV64-NEXT:    slli a3, a3, 3
+; CHECK-RV64-NEXT:    add a3, sp, a3
+; CHECK-RV64-NEXT:    addi a3, a3, 16
+; CHECK-RV64-NEXT:    vs8r.v v24, (a3) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vx v16, v8, a1
+; CHECK-RV64-NEXT:    addi a3, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a3) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    csrr a3, vlenb
+; CHECK-RV64-NEXT:    slli a3, a3, 3
+; CHECK-RV64-NEXT:    add a3, sp, a3
+; CHECK-RV64-NEXT:    addi a3, a3, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a3, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
+; CHECK-RV64-NEXT:    vmv.v.v v24, v16
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    viota.m v16, v7
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-RV64-NEXT:    li a0, 1024
+; CHECK-RV64-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a1
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
+  %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
+  ret <1024 x i8> %res
+}
 
 declare <512 x i8> @llvm.masked.expandload.v512i8(ptr, <512 x i1>, <512 x i8>)
 declare <1024 x i8> @llvm.masked.expandload.v1024i8(ptr, <1024 x i1>, <1024 x i8>)

>From ee0fd2718e7803d6936268e684ce0519ebfc7874 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 19:42:07 +0800
Subject: [PATCH 06/15] Revert "Split LMUL 8 case manually"

This reverts commit 661f7ac8c8bb0e1fa4e50632f30aef3b3b7d0ee1.
---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 96 +-------------------
 llvm/lib/Target/RISCV/RISCVISelLowering.h   |  2 -
 llvm/test/CodeGen/RISCV/rvv/expandload.ll   | 97 +--------------------
 3 files changed, 7 insertions(+), 188 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index c129a0d260ed92..33b6cec3e17e76 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11096,95 +11096,6 @@ RISCVTargetLowering::lowerFixedLengthVectorStoreToRVV(SDValue Op,
       Store->getMemoryVT(), Store->getMemOperand());
 }
 
-SDValue RISCVTargetLowering::splitMaskedExpandingLoad(SDValue Op,
-                                                      SelectionDAG &DAG) const {
-  SDLoc DL(Op);
-  MVT VT = Op.getSimpleValueType();
-  auto *MLD = cast<MaskedLoadSDNode>(Op);
-  MVT XLenVT = Subtarget.getXLenVT();
-  auto [LoVT, HiVT] = DAG.GetSplitDestVTs(MLD->getValueType(0));
-
-  SDValue Chain = MLD->getChain();
-  SDValue Ptr = MLD->getBasePtr();
-  SDValue Offset = MLD->getOffset();
-  SDValue Mask = MLD->getMask();
-  SDValue Passthru = MLD->getPassThru();
-  Align Alignment = MLD->getOriginalAlign();
-  ISD::LoadExtType ExtType = MLD->getExtensionType();
-
-  // Split Mask operand
-  auto [MaskLo, MaskHi] = DAG.SplitVector(Mask, DL);
-
-  EVT MemoryVT = MLD->getMemoryVT();
-  bool HiIsEmpty = false;
-  auto [LoMemVT, HiMemVT] =
-      DAG.GetDependentSplitDestVTs(MemoryVT, LoVT, &HiIsEmpty);
-
-  // Split PassThru operand
-  auto [PassthruLo, PassthruHi] = DAG.SplitVector(Passthru, DL);
-
-  MachineMemOperand *MMO = DAG.getMachineFunction().getMachineMemOperand(
-      MLD->getPointerInfo(), MachineMemOperand::MOLoad,
-      LocationSize::beforeOrAfterPointer(), Alignment, MLD->getAAInfo(),
-      MLD->getRanges());
-
-  SDValue Lo, Hi;
-  Lo = DAG.getMaskedLoad(LoVT, DL, Chain, Ptr, Offset, MaskLo, PassthruLo,
-                         LoMemVT, MMO, MLD->getAddressingMode(), ExtType,
-                         /*IsExpanding=*/true);
-
-  if (HiIsEmpty) {
-    // The hi masked load has zero storage size. We therefore simply set it to
-    // the low masked load and rely on subsequent removal from the chain.
-    Hi = Lo;
-  } else {
-    EVT MaskVT = MaskLo.getValueType();
-    SDValue VL = DAG.getConstant(MaskVT.getVectorNumElements(), DL, XLenVT);
-
-    MVT MaskContainerVT =
-        getContainerForFixedLengthVector(MaskVT.getSimpleVT());
-    MaskLo = convertToScalableVector(MaskContainerVT, MaskLo, DAG, Subtarget);
-
-    SDValue Increment = DAG.getNode(
-        RISCVISD::VCPOP_VL, DL, XLenVT, MaskLo,
-        getAllOnesMask(MaskLo.getSimpleValueType(), VL, DL, DAG), VL);
-
-    // Scale is an element size in bytes.
-    SDValue Scale =
-        DAG.getConstant(LoMemVT.getScalarSizeInBits() / 8, DL, XLenVT);
-    Increment = DAG.getNode(ISD::MUL, DL, XLenVT, Increment, Scale);
-
-    Ptr = DAG.getNode(ISD::ADD, DL, XLenVT, Ptr, Increment);
-
-    MachinePointerInfo MPI;
-    if (LoMemVT.isScalableVector())
-      MPI = MachinePointerInfo(MLD->getPointerInfo().getAddrSpace());
-    else
-      MPI = MLD->getPointerInfo().getWithOffset(
-          LoMemVT.getStoreSize().getFixedValue());
-
-    MMO = DAG.getMachineFunction().getMachineMemOperand(
-        MPI, MachineMemOperand::MOLoad, LocationSize::beforeOrAfterPointer(),
-        Alignment, MLD->getAAInfo(), MLD->getRanges());
-
-    Hi = DAG.getMaskedLoad(HiVT, DL, Chain, Ptr, Offset, MaskHi, PassthruHi,
-                           HiMemVT, MMO, MLD->getAddressingMode(), ExtType,
-                           /*IsExpanding=*/true);
-  }
-
-  // Build a factor node to remember that this load is independent of the
-  // other one.
-  Chain = DAG.getNode(ISD::TokenFactor, DL, MVT::Other, Lo.getValue(1),
-                      Hi.getValue(1));
-
-  // Legalize the chain result - switch anything that used the old chain to
-  // use the new one.
-  DAG.ReplaceAllUsesOfValueWith(SDValue(MLD, 1), Chain);
-
-  return DAG.getMergeValues(
-      {DAG.getNode(ISD::CONCAT_VECTORS, DL, VT, Lo, Hi), Chain}, DL);
-}
-
 SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
                                              SelectionDAG &DAG) const {
   SDLoc DL(Op);
@@ -11240,10 +11151,9 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
     // should change the element type of index vector to i16 to avoid overflow.
     if (IndexEltVT == MVT::i8 &&
         VT.getVectorElementCount().getKnownMinValue() > 256) {
-      // If this will result in illegal types, we split it into two loads.
-      if (getLMUL(IndexVT) == RISCVII::LMUL_8)
-        return splitMaskedExpandingLoad(Op, DAG);
-
+      // FIXME: Don't know how to make LMUL==8 case legal.
+      assert(getLMUL(IndexVT) != RISCVII::LMUL_8 &&
+             "We don't know how to lower LMUL==8 case");
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
     }
 
diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.h b/llvm/lib/Target/RISCV/RISCVISelLowering.h
index 81a1255ee9b0d2..c3749447955330 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.h
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.h
@@ -995,8 +995,6 @@ class RISCVTargetLowering : public TargetLowering {
   SDValue lowerINIT_TRAMPOLINE(SDValue Op, SelectionDAG &DAG) const;
   SDValue lowerADJUST_TRAMPOLINE(SDValue Op, SelectionDAG &DAG) const;
 
-  SDValue splitMaskedExpandingLoad(SDValue Op, SelectionDAG &DAG) const;
-
   bool isEligibleForTailCallOptimization(
       CCState &CCInfo, CallLoweringInfo &CLI, MachineFunction &MF,
       const SmallVector<CCValAssign, 16> &ArgLocs) const;
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index 86e66ea335964b..eb862510a5bdfd 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1215,99 +1215,10 @@ define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8
 }
 
 ; FIXME: Don't know how to make it legal.
-define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
-; CHECK-RV32-LABEL: test_expandload_v1024i8:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    addi sp, sp, -16
-; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 3
-; CHECK-RV32-NEXT:    sub sp, sp, a1
-; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x08, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 8 * vlenb
-; CHECK-RV32-NEXT:    vmv1r.v v7, v0
-; CHECK-RV32-NEXT:    li a1, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-RV32-NEXT:    vcpop.m a2, v0
-; CHECK-RV32-NEXT:    add a2, a0, a2
-; CHECK-RV32-NEXT:    li a3, 64
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vx v0, v0, a3
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    viota.m v16, v0
-; CHECK-RV32-NEXT:    addi a3, sp, 16
-; CHECK-RV32-NEXT:    vs8r.v v16, (a3) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vx v24, v8, a1
-; CHECK-RV32-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; CHECK-RV32-NEXT:    vluxei16.v v24, (a2), v16, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    viota.m v16, v7
-; CHECK-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; CHECK-RV32-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-RV32-NEXT:    li a0, 1024
-; CHECK-RV32-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a1
-; CHECK-RV32-NEXT:    csrr a0, vlenb
-; CHECK-RV32-NEXT:    slli a0, a0, 3
-; CHECK-RV32-NEXT:    add sp, sp, a0
-; CHECK-RV32-NEXT:    addi sp, sp, 16
-; CHECK-RV32-NEXT:    ret
-;
-; CHECK-RV64-LABEL: test_expandload_v1024i8:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    addi sp, sp, -16
-; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 4
-; CHECK-RV64-NEXT:    sub sp, sp, a1
-; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-RV64-NEXT:    li a1, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-RV64-NEXT:    vcpop.m a2, v0
-; CHECK-RV64-NEXT:    add a2, a0, a2
-; CHECK-RV64-NEXT:    li a3, 64
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vx v0, v0, a3
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v24, v0
-; CHECK-RV64-NEXT:    csrr a3, vlenb
-; CHECK-RV64-NEXT:    slli a3, a3, 3
-; CHECK-RV64-NEXT:    add a3, sp, a3
-; CHECK-RV64-NEXT:    addi a3, a3, 16
-; CHECK-RV64-NEXT:    vs8r.v v24, (a3) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vx v16, v8, a1
-; CHECK-RV64-NEXT:    addi a3, sp, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a3) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    csrr a3, vlenb
-; CHECK-RV64-NEXT:    slli a3, a3, 3
-; CHECK-RV64-NEXT:    add a3, sp, a3
-; CHECK-RV64-NEXT:    addi a3, a3, 16
-; CHECK-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    addi a3, sp, 16
-; CHECK-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; CHECK-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
-; CHECK-RV64-NEXT:    vmv.v.v v24, v16
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v16, v7
-; CHECK-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; CHECK-RV64-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-RV64-NEXT:    li a0, 1024
-; CHECK-RV64-NEXT:    vsetvli zero, a0, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a1
-; CHECK-RV64-NEXT:    csrr a0, vlenb
-; CHECK-RV64-NEXT:    slli a0, a0, 4
-; CHECK-RV64-NEXT:    add sp, sp, a0
-; CHECK-RV64-NEXT:    addi sp, sp, 16
-; CHECK-RV64-NEXT:    ret
-  %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
-  ret <1024 x i8> %res
-}
+; define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
+;   %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
+;   ret <1024 x i8> %res
+; }
 
 declare <512 x i8> @llvm.masked.expandload.v512i8(ptr, <512 x i1>, <512 x i8>)
 declare <1024 x i8> @llvm.masked.expandload.v1024i8(ptr, <1024 x i1>, <1024 x i8>)

>From 67b1c2208317f168f5f2809ab8424f9b1ffc3030 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 19:53:06 +0800
Subject: [PATCH 07/15] Return SDValue() for LMUL 8 case and remove
 declarations in test

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp |  6 +--
 llvm/test/CodeGen/RISCV/rvv/expandload.ll   | 45 +--------------------
 2 files changed, 4 insertions(+), 47 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 33b6cec3e17e76..f724fb286e5dda 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11151,9 +11151,9 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
     // should change the element type of index vector to i16 to avoid overflow.
     if (IndexEltVT == MVT::i8 &&
         VT.getVectorElementCount().getKnownMinValue() > 256) {
-      // FIXME: Don't know how to make LMUL==8 case legal.
-      assert(getLMUL(IndexVT) != RISCVII::LMUL_8 &&
-             "We don't know how to lower LMUL==8 case");
+      // FIXME: We need to do vector splitting manually for LMUL=8 cases.
+      if (getLMUL(IndexVT) == RISCVII::LMUL_8)
+        return SDValue();
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
     }
 
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index eb862510a5bdfd..e7b30e518513c7 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -334,16 +334,6 @@ define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passth
   ret <256 x i8> %res
 }
 
-declare <1 x i8> @llvm.masked.expandload.v1i8(ptr, <1 x i1>, <1 x i8>)
-declare <2 x i8> @llvm.masked.expandload.v2i8(ptr, <2 x i1>, <2 x i8>)
-declare <4 x i8> @llvm.masked.expandload.v4i8(ptr, <4 x i1>, <4 x i8>)
-declare <8 x i8> @llvm.masked.expandload.v8i8(ptr, <8 x i1>, <8 x i8>)
-declare <16 x i8> @llvm.masked.expandload.v16i8(ptr, <16 x i1>, <16 x i8>)
-declare <32 x i8> @llvm.masked.expandload.v32i8(ptr, <32 x i1>, <32 x i8>)
-declare <64 x i8> @llvm.masked.expandload.v64i8(ptr, <64 x i1>, <64 x i8>)
-declare <128 x i8> @llvm.masked.expandload.v128i8(ptr, <128 x i1>, <128 x i8>)
-declare <256 x i8> @llvm.masked.expandload.v256i8(ptr, <256 x i1>, <256 x i8>)
-
 ; Load + expand for i16 type
 
 define <1 x i16> @test_expandload_v1i16(ptr %base, <1 x i1> %mask, <1 x i16> %passthru) {
@@ -659,15 +649,6 @@ define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %pas
   ret <128 x i16> %res
 }
 
-declare <1 x i16> @llvm.masked.expandload.v1i16(ptr, <1 x i1>, <1 x i16>)
-declare <2 x i16> @llvm.masked.expandload.v2i16(ptr, <2 x i1>, <2 x i16>)
-declare <4 x i16> @llvm.masked.expandload.v4i16(ptr, <4 x i1>, <4 x i16>)
-declare <8 x i16> @llvm.masked.expandload.v8i16(ptr, <8 x i1>, <8 x i16>)
-declare <16 x i16> @llvm.masked.expandload.v16i16(ptr, <16 x i1>, <16 x i16>)
-declare <32 x i16> @llvm.masked.expandload.v32i16(ptr, <32 x i1>, <32 x i16>)
-declare <64 x i16> @llvm.masked.expandload.v64i16(ptr, <64 x i1>, <64 x i16>)
-declare <128 x i16> @llvm.masked.expandload.v128i16(ptr, <128 x i1>, <128 x i16>)
-
 ; Load + expand for i32 type
 
 define <1 x i32> @test_expandload_v1i32(ptr %base, <1 x i1> %mask, <1 x i32> %passthru) {
@@ -934,14 +915,6 @@ define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passth
   ret <64 x i32> %res
 }
 
-declare <1 x i32> @llvm.masked.expandload.v1i32(ptr, <1 x i1>, <1 x i32>)
-declare <2 x i32> @llvm.masked.expandload.v2i32(ptr, <2 x i1>, <2 x i32>)
-declare <4 x i32> @llvm.masked.expandload.v4i32(ptr, <4 x i1>, <4 x i32>)
-declare <8 x i32> @llvm.masked.expandload.v8i32(ptr, <8 x i1>, <8 x i32>)
-declare <16 x i32> @llvm.masked.expandload.v16i32(ptr, <16 x i1>, <16 x i32>)
-declare <32 x i32> @llvm.masked.expandload.v32i32(ptr, <32 x i1>, <32 x i32>)
-declare <64 x i32> @llvm.masked.expandload.v64i32(ptr, <64 x i1>, <64 x i32>)
-
 ; Load + expand for i64 type
 
 define <1 x i64> @test_expandload_v1i64(ptr %base, <1 x i1> %mask, <1 x i64> %passthru) {
@@ -1192,16 +1165,9 @@ define <32 x i64> @test_expandload_v32i64_all_ones(ptr %base, <32 x i64> %passth
   ret <32 x i64> %res
 }
 
-declare <1 x i64> @llvm.masked.expandload.v1i64(ptr, <1 x i1>, <1 x i64>)
-declare <2 x i64> @llvm.masked.expandload.v2i64(ptr, <2 x i1>, <2 x i64>)
-declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
-declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
-declare <16 x i64> @llvm.masked.expandload.v16i64(ptr, <16 x i1>, <16 x i64>)
-declare <32 x i64> @llvm.masked.expandload.v32i64(ptr, <32 x i1>, <32 x i64>)
-
 ; Tests that will exceed the range of i8 index.
 
-define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) "target-features"="+zvl1024b" {
+define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(16, 1024) {
 ; CHECK-LABEL: test_expandload_v512i8:
 ; CHECK:       # %bb.0:
 ; CHECK-NEXT:    li a1, 512
@@ -1213,12 +1179,3 @@ define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
-
-; FIXME: Don't know how to make it legal.
-; define <1024 x i8> @test_expandload_v1024i8(ptr %base, <1024 x i1> %mask, <1024 x i8> %passthru) "target-features"="+zvl1024b" {
-;   %res = call <1024 x i8> @llvm.masked.expandload.v1024i8(ptr align 1 %base, <1024 x i1> %mask, <1024 x i8> %passthru)
-;   ret <1024 x i8> %res
-; }
-
-declare <512 x i8> @llvm.masked.expandload.v512i8(ptr, <512 x i1>, <512 x i8>)
-declare <1024 x i8> @llvm.masked.expandload.v1024i8(ptr, <1024 x i1>, <1024 x i8>)

>From 7cc50b0563dc09cd759c3077588596278cfe7fdd Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 6 Aug 2024 20:21:43 +0800
Subject: [PATCH 08/15] Clean includes

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 1 -
 1 file changed, 1 deletion(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index f724fb286e5dda..a07159e576aa70 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -46,7 +46,6 @@
 #include "llvm/Support/KnownBits.h"
 #include "llvm/Support/MathExtras.h"
 #include "llvm/Support/raw_ostream.h"
-#include "llvm/TargetParser/RISCVTargetParser.h"
 #include <optional>
 
 using namespace llvm;

>From bce834306cbed3d0a534aec96fbaad63ca6bc51f Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 7 Aug 2024 11:07:28 +0800
Subject: [PATCH 09/15] Use getVectorNumElements

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index a07159e576aa70..6753356e53d336 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11148,8 +11148,7 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
 
     // If index vector is an i8 vector and the element count exceeds 256, we
     // should change the element type of index vector to i16 to avoid overflow.
-    if (IndexEltVT == MVT::i8 &&
-        VT.getVectorElementCount().getKnownMinValue() > 256) {
+    if (IndexEltVT == MVT::i8 && VT.getVectorNumElements() > 256) {
       // FIXME: We need to do vector splitting manually for LMUL=8 cases.
       if (getLMUL(IndexVT) == RISCVII::LMUL_8)
         return SDValue();

>From 47a7d193c9f49875046cd6778b6c851e2cc09b2f Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 7 Aug 2024 12:09:25 +0800
Subject: [PATCH 10/15] Scalarize i8 vectors with LMUL>=8

---
 .../Target/RISCV/RISCVTargetTransformInfo.cpp |     8 +
 llvm/test/CodeGen/RISCV/rvv/expandload.ll     | 18542 ++++++++++++++++
 2 files changed, 18550 insertions(+)

diff --git a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
index d1d54990ab15ce..dacbf1a3991cde 100644
--- a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
+++ b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
@@ -15,6 +15,7 @@
 #include "llvm/CodeGen/TargetLowering.h"
 #include "llvm/IR/Instructions.h"
 #include "llvm/IR/PatternMatch.h"
+#include "llvm/Support/TypeSize.h"
 #include <cmath>
 #include <optional>
 using namespace llvm;
@@ -2293,6 +2294,13 @@ bool RISCVTTIImpl::isLegalMaskedExpandLoad(Type *DataTy, Align Alignment) {
 
   if (!isLegalMaskedLoadStore(DataTy, Alignment))
     return false;
+
+  // FIXME: If it is an i8 vector and the element count exceeds 256, we should
+  // scalarize these types with LMUL >= maximum fixed-length LMUL.
+  if (VTy->getElementType()->isIntegerTy(8))
+    if (VTy->getElementCount().getFixedValue() > 256)
+      return VTy->getPrimitiveSizeInBits() / ST->getRealMinVLen() <
+             ST->getMaxLMULForFixedLengthVectors();
   return true;
 }
 
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index e7b30e518513c7..26eee6b28d5647 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1179,3 +1179,18545 @@ define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
+
+; FIXME: We could split this during lowering instead.
+define <512 x i8> @test_expandload_v512i8_vlen512(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(8, 1024) {
+; CHECK-RV32-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-RV32-NEXT:    andi a1, a3, 1
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_1
+; CHECK-RV32-NEXT:    j .LBB61_544
+; CHECK-RV32-NEXT:  .LBB61_1: # %else
+; CHECK-RV32-NEXT:    andi a1, a3, 2
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_2
+; CHECK-RV32-NEXT:    j .LBB61_545
+; CHECK-RV32-NEXT:  .LBB61_2: # %else2
+; CHECK-RV32-NEXT:    andi a1, a3, 4
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_3
+; CHECK-RV32-NEXT:    j .LBB61_546
+; CHECK-RV32-NEXT:  .LBB61_3: # %else6
+; CHECK-RV32-NEXT:    andi a1, a3, 8
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_4
+; CHECK-RV32-NEXT:    j .LBB61_547
+; CHECK-RV32-NEXT:  .LBB61_4: # %else10
+; CHECK-RV32-NEXT:    andi a1, a3, 16
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_5
+; CHECK-RV32-NEXT:    j .LBB61_548
+; CHECK-RV32-NEXT:  .LBB61_5: # %else14
+; CHECK-RV32-NEXT:    andi a1, a3, 32
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_6
+; CHECK-RV32-NEXT:    j .LBB61_549
+; CHECK-RV32-NEXT:  .LBB61_6: # %else18
+; CHECK-RV32-NEXT:    andi a1, a3, 64
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_7
+; CHECK-RV32-NEXT:    j .LBB61_550
+; CHECK-RV32-NEXT:  .LBB61_7: # %else22
+; CHECK-RV32-NEXT:    andi a1, a3, 128
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_8
+; CHECK-RV32-NEXT:    j .LBB61_551
+; CHECK-RV32-NEXT:  .LBB61_8: # %else26
+; CHECK-RV32-NEXT:    andi a1, a3, 256
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_9
+; CHECK-RV32-NEXT:    j .LBB61_552
+; CHECK-RV32-NEXT:  .LBB61_9: # %else30
+; CHECK-RV32-NEXT:    andi a1, a3, 512
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_10
+; CHECK-RV32-NEXT:    j .LBB61_553
+; CHECK-RV32-NEXT:  .LBB61_10: # %else34
+; CHECK-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_11
+; CHECK-RV32-NEXT:    j .LBB61_554
+; CHECK-RV32-NEXT:  .LBB61_11: # %else38
+; CHECK-RV32-NEXT:    slli a1, a3, 20
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_12
+; CHECK-RV32-NEXT:    j .LBB61_555
+; CHECK-RV32-NEXT:  .LBB61_12: # %else42
+; CHECK-RV32-NEXT:    slli a1, a3, 19
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_13
+; CHECK-RV32-NEXT:    j .LBB61_556
+; CHECK-RV32-NEXT:  .LBB61_13: # %else46
+; CHECK-RV32-NEXT:    slli a1, a3, 18
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_14
+; CHECK-RV32-NEXT:    j .LBB61_557
+; CHECK-RV32-NEXT:  .LBB61_14: # %else50
+; CHECK-RV32-NEXT:    slli a1, a3, 17
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_15
+; CHECK-RV32-NEXT:    j .LBB61_558
+; CHECK-RV32-NEXT:  .LBB61_15: # %else54
+; CHECK-RV32-NEXT:    slli a1, a3, 16
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_16
+; CHECK-RV32-NEXT:    j .LBB61_559
+; CHECK-RV32-NEXT:  .LBB61_16: # %else58
+; CHECK-RV32-NEXT:    slli a1, a3, 15
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_17
+; CHECK-RV32-NEXT:    j .LBB61_560
+; CHECK-RV32-NEXT:  .LBB61_17: # %else62
+; CHECK-RV32-NEXT:    slli a1, a3, 14
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_18
+; CHECK-RV32-NEXT:    j .LBB61_561
+; CHECK-RV32-NEXT:  .LBB61_18: # %else66
+; CHECK-RV32-NEXT:    slli a1, a3, 13
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_19
+; CHECK-RV32-NEXT:    j .LBB61_562
+; CHECK-RV32-NEXT:  .LBB61_19: # %else70
+; CHECK-RV32-NEXT:    slli a1, a3, 12
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_20
+; CHECK-RV32-NEXT:    j .LBB61_563
+; CHECK-RV32-NEXT:  .LBB61_20: # %else74
+; CHECK-RV32-NEXT:    slli a1, a3, 11
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_21
+; CHECK-RV32-NEXT:    j .LBB61_564
+; CHECK-RV32-NEXT:  .LBB61_21: # %else78
+; CHECK-RV32-NEXT:    slli a1, a3, 10
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_22
+; CHECK-RV32-NEXT:    j .LBB61_565
+; CHECK-RV32-NEXT:  .LBB61_22: # %else82
+; CHECK-RV32-NEXT:    slli a1, a3, 9
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_23
+; CHECK-RV32-NEXT:    j .LBB61_566
+; CHECK-RV32-NEXT:  .LBB61_23: # %else86
+; CHECK-RV32-NEXT:    slli a1, a3, 8
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_24
+; CHECK-RV32-NEXT:    j .LBB61_567
+; CHECK-RV32-NEXT:  .LBB61_24: # %else90
+; CHECK-RV32-NEXT:    slli a1, a3, 7
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_25
+; CHECK-RV32-NEXT:    j .LBB61_568
+; CHECK-RV32-NEXT:  .LBB61_25: # %else94
+; CHECK-RV32-NEXT:    slli a1, a3, 6
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_26
+; CHECK-RV32-NEXT:    j .LBB61_569
+; CHECK-RV32-NEXT:  .LBB61_26: # %else98
+; CHECK-RV32-NEXT:    slli a1, a3, 5
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_27
+; CHECK-RV32-NEXT:    j .LBB61_570
+; CHECK-RV32-NEXT:  .LBB61_27: # %else102
+; CHECK-RV32-NEXT:    slli a1, a3, 4
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_28
+; CHECK-RV32-NEXT:    j .LBB61_571
+; CHECK-RV32-NEXT:  .LBB61_28: # %else106
+; CHECK-RV32-NEXT:    slli a1, a3, 3
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_30
+; CHECK-RV32-NEXT:  .LBB61_29: # %cond.load109
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_30: # %else110
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_32
+; CHECK-RV32-NEXT:  # %bb.31: # %cond.load113
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_32: # %else114
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v0, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_34
+; CHECK-RV32-NEXT:  # %bb.33: # %cond.load117
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v17, 30
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_34: # %else118
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_35
+; CHECK-RV32-NEXT:    j .LBB61_572
+; CHECK-RV32-NEXT:  .LBB61_35: # %else122
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_36
+; CHECK-RV32-NEXT:    j .LBB61_573
+; CHECK-RV32-NEXT:  .LBB61_36: # %else126
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_37
+; CHECK-RV32-NEXT:    j .LBB61_574
+; CHECK-RV32-NEXT:  .LBB61_37: # %else130
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_38
+; CHECK-RV32-NEXT:    j .LBB61_575
+; CHECK-RV32-NEXT:  .LBB61_38: # %else134
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_39
+; CHECK-RV32-NEXT:    j .LBB61_576
+; CHECK-RV32-NEXT:  .LBB61_39: # %else138
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_40
+; CHECK-RV32-NEXT:    j .LBB61_577
+; CHECK-RV32-NEXT:  .LBB61_40: # %else142
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_41
+; CHECK-RV32-NEXT:    j .LBB61_578
+; CHECK-RV32-NEXT:  .LBB61_41: # %else146
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_42
+; CHECK-RV32-NEXT:    j .LBB61_579
+; CHECK-RV32-NEXT:  .LBB61_42: # %else150
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_43
+; CHECK-RV32-NEXT:    j .LBB61_580
+; CHECK-RV32-NEXT:  .LBB61_43: # %else154
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_44
+; CHECK-RV32-NEXT:    j .LBB61_581
+; CHECK-RV32-NEXT:  .LBB61_44: # %else158
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_45
+; CHECK-RV32-NEXT:    j .LBB61_582
+; CHECK-RV32-NEXT:  .LBB61_45: # %else162
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_46
+; CHECK-RV32-NEXT:    j .LBB61_583
+; CHECK-RV32-NEXT:  .LBB61_46: # %else166
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_47
+; CHECK-RV32-NEXT:    j .LBB61_584
+; CHECK-RV32-NEXT:  .LBB61_47: # %else170
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_48
+; CHECK-RV32-NEXT:    j .LBB61_585
+; CHECK-RV32-NEXT:  .LBB61_48: # %else174
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_49
+; CHECK-RV32-NEXT:    j .LBB61_586
+; CHECK-RV32-NEXT:  .LBB61_49: # %else178
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_50
+; CHECK-RV32-NEXT:    j .LBB61_587
+; CHECK-RV32-NEXT:  .LBB61_50: # %else182
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_51
+; CHECK-RV32-NEXT:    j .LBB61_588
+; CHECK-RV32-NEXT:  .LBB61_51: # %else186
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_52
+; CHECK-RV32-NEXT:    j .LBB61_589
+; CHECK-RV32-NEXT:  .LBB61_52: # %else190
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_53
+; CHECK-RV32-NEXT:    j .LBB61_590
+; CHECK-RV32-NEXT:  .LBB61_53: # %else194
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_54
+; CHECK-RV32-NEXT:    j .LBB61_591
+; CHECK-RV32-NEXT:  .LBB61_54: # %else198
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_55
+; CHECK-RV32-NEXT:    j .LBB61_592
+; CHECK-RV32-NEXT:  .LBB61_55: # %else202
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_56
+; CHECK-RV32-NEXT:    j .LBB61_593
+; CHECK-RV32-NEXT:  .LBB61_56: # %else206
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_57
+; CHECK-RV32-NEXT:    j .LBB61_594
+; CHECK-RV32-NEXT:  .LBB61_57: # %else210
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_58
+; CHECK-RV32-NEXT:    j .LBB61_595
+; CHECK-RV32-NEXT:  .LBB61_58: # %else214
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_59
+; CHECK-RV32-NEXT:    j .LBB61_596
+; CHECK-RV32-NEXT:  .LBB61_59: # %else218
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_60
+; CHECK-RV32-NEXT:    j .LBB61_597
+; CHECK-RV32-NEXT:  .LBB61_60: # %else222
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_61
+; CHECK-RV32-NEXT:    j .LBB61_598
+; CHECK-RV32-NEXT:  .LBB61_61: # %else226
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_62
+; CHECK-RV32-NEXT:    j .LBB61_599
+; CHECK-RV32-NEXT:  .LBB61_62: # %else230
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_63
+; CHECK-RV32-NEXT:    j .LBB61_600
+; CHECK-RV32-NEXT:  .LBB61_63: # %else234
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_64
+; CHECK-RV32-NEXT:    j .LBB61_601
+; CHECK-RV32-NEXT:  .LBB61_64: # %else238
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_66
+; CHECK-RV32-NEXT:  .LBB61_65: # %cond.load241
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 62
+; CHECK-RV32-NEXT:    li a4, 61
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:  .LBB61_66: # %else242
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_68
+; CHECK-RV32-NEXT:  # %bb.67: # %cond.load245
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 63
+; CHECK-RV32-NEXT:    li a4, 62
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_68: # %else246
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_69
+; CHECK-RV32-NEXT:    j .LBB61_602
+; CHECK-RV32-NEXT:  .LBB61_69: # %else250
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_70
+; CHECK-RV32-NEXT:    j .LBB61_603
+; CHECK-RV32-NEXT:  .LBB61_70: # %else254
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_71
+; CHECK-RV32-NEXT:    j .LBB61_604
+; CHECK-RV32-NEXT:  .LBB61_71: # %else258
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_72
+; CHECK-RV32-NEXT:    j .LBB61_605
+; CHECK-RV32-NEXT:  .LBB61_72: # %else262
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_73
+; CHECK-RV32-NEXT:    j .LBB61_606
+; CHECK-RV32-NEXT:  .LBB61_73: # %else266
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_74
+; CHECK-RV32-NEXT:    j .LBB61_607
+; CHECK-RV32-NEXT:  .LBB61_74: # %else270
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_75
+; CHECK-RV32-NEXT:    j .LBB61_608
+; CHECK-RV32-NEXT:  .LBB61_75: # %else274
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_76
+; CHECK-RV32-NEXT:    j .LBB61_609
+; CHECK-RV32-NEXT:  .LBB61_76: # %else278
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_77
+; CHECK-RV32-NEXT:    j .LBB61_610
+; CHECK-RV32-NEXT:  .LBB61_77: # %else282
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_78
+; CHECK-RV32-NEXT:    j .LBB61_611
+; CHECK-RV32-NEXT:  .LBB61_78: # %else286
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_79
+; CHECK-RV32-NEXT:    j .LBB61_612
+; CHECK-RV32-NEXT:  .LBB61_79: # %else290
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_80
+; CHECK-RV32-NEXT:    j .LBB61_613
+; CHECK-RV32-NEXT:  .LBB61_80: # %else294
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_81
+; CHECK-RV32-NEXT:    j .LBB61_614
+; CHECK-RV32-NEXT:  .LBB61_81: # %else298
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_82
+; CHECK-RV32-NEXT:    j .LBB61_615
+; CHECK-RV32-NEXT:  .LBB61_82: # %else302
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_83
+; CHECK-RV32-NEXT:    j .LBB61_616
+; CHECK-RV32-NEXT:  .LBB61_83: # %else306
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_84
+; CHECK-RV32-NEXT:    j .LBB61_617
+; CHECK-RV32-NEXT:  .LBB61_84: # %else310
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_85
+; CHECK-RV32-NEXT:    j .LBB61_618
+; CHECK-RV32-NEXT:  .LBB61_85: # %else314
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_86
+; CHECK-RV32-NEXT:    j .LBB61_619
+; CHECK-RV32-NEXT:  .LBB61_86: # %else318
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_87
+; CHECK-RV32-NEXT:    j .LBB61_620
+; CHECK-RV32-NEXT:  .LBB61_87: # %else322
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_88
+; CHECK-RV32-NEXT:    j .LBB61_621
+; CHECK-RV32-NEXT:  .LBB61_88: # %else326
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_89
+; CHECK-RV32-NEXT:    j .LBB61_622
+; CHECK-RV32-NEXT:  .LBB61_89: # %else330
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_90
+; CHECK-RV32-NEXT:    j .LBB61_623
+; CHECK-RV32-NEXT:  .LBB61_90: # %else334
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_91
+; CHECK-RV32-NEXT:    j .LBB61_624
+; CHECK-RV32-NEXT:  .LBB61_91: # %else338
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_92
+; CHECK-RV32-NEXT:    j .LBB61_625
+; CHECK-RV32-NEXT:  .LBB61_92: # %else342
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_93
+; CHECK-RV32-NEXT:    j .LBB61_626
+; CHECK-RV32-NEXT:  .LBB61_93: # %else346
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_94
+; CHECK-RV32-NEXT:    j .LBB61_627
+; CHECK-RV32-NEXT:  .LBB61_94: # %else350
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_95
+; CHECK-RV32-NEXT:    j .LBB61_628
+; CHECK-RV32-NEXT:  .LBB61_95: # %else354
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_96
+; CHECK-RV32-NEXT:    j .LBB61_629
+; CHECK-RV32-NEXT:  .LBB61_96: # %else358
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_97
+; CHECK-RV32-NEXT:    j .LBB61_630
+; CHECK-RV32-NEXT:  .LBB61_97: # %else362
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_98
+; CHECK-RV32-NEXT:    j .LBB61_631
+; CHECK-RV32-NEXT:  .LBB61_98: # %else366
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_100
+; CHECK-RV32-NEXT:  .LBB61_99: # %cond.load369
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 94
+; CHECK-RV32-NEXT:    li a4, 93
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_100: # %else370
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_102
+; CHECK-RV32-NEXT:  # %bb.101: # %cond.load373
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 95
+; CHECK-RV32-NEXT:    li a4, 94
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_102: # %else374
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_103
+; CHECK-RV32-NEXT:    j .LBB61_632
+; CHECK-RV32-NEXT:  .LBB61_103: # %else378
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_104
+; CHECK-RV32-NEXT:    j .LBB61_633
+; CHECK-RV32-NEXT:  .LBB61_104: # %else382
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_105
+; CHECK-RV32-NEXT:    j .LBB61_634
+; CHECK-RV32-NEXT:  .LBB61_105: # %else386
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_106
+; CHECK-RV32-NEXT:    j .LBB61_635
+; CHECK-RV32-NEXT:  .LBB61_106: # %else390
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_107
+; CHECK-RV32-NEXT:    j .LBB61_636
+; CHECK-RV32-NEXT:  .LBB61_107: # %else394
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_108
+; CHECK-RV32-NEXT:    j .LBB61_637
+; CHECK-RV32-NEXT:  .LBB61_108: # %else398
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_109
+; CHECK-RV32-NEXT:    j .LBB61_638
+; CHECK-RV32-NEXT:  .LBB61_109: # %else402
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_110
+; CHECK-RV32-NEXT:    j .LBB61_639
+; CHECK-RV32-NEXT:  .LBB61_110: # %else406
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_111
+; CHECK-RV32-NEXT:    j .LBB61_640
+; CHECK-RV32-NEXT:  .LBB61_111: # %else410
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_112
+; CHECK-RV32-NEXT:    j .LBB61_641
+; CHECK-RV32-NEXT:  .LBB61_112: # %else414
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_113
+; CHECK-RV32-NEXT:    j .LBB61_642
+; CHECK-RV32-NEXT:  .LBB61_113: # %else418
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_114
+; CHECK-RV32-NEXT:    j .LBB61_643
+; CHECK-RV32-NEXT:  .LBB61_114: # %else422
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_115
+; CHECK-RV32-NEXT:    j .LBB61_644
+; CHECK-RV32-NEXT:  .LBB61_115: # %else426
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_116
+; CHECK-RV32-NEXT:    j .LBB61_645
+; CHECK-RV32-NEXT:  .LBB61_116: # %else430
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_117
+; CHECK-RV32-NEXT:    j .LBB61_646
+; CHECK-RV32-NEXT:  .LBB61_117: # %else434
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_118
+; CHECK-RV32-NEXT:    j .LBB61_647
+; CHECK-RV32-NEXT:  .LBB61_118: # %else438
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_119
+; CHECK-RV32-NEXT:    j .LBB61_648
+; CHECK-RV32-NEXT:  .LBB61_119: # %else442
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_120
+; CHECK-RV32-NEXT:    j .LBB61_649
+; CHECK-RV32-NEXT:  .LBB61_120: # %else446
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_121
+; CHECK-RV32-NEXT:    j .LBB61_650
+; CHECK-RV32-NEXT:  .LBB61_121: # %else450
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_122
+; CHECK-RV32-NEXT:    j .LBB61_651
+; CHECK-RV32-NEXT:  .LBB61_122: # %else454
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_123
+; CHECK-RV32-NEXT:    j .LBB61_652
+; CHECK-RV32-NEXT:  .LBB61_123: # %else458
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_124
+; CHECK-RV32-NEXT:    j .LBB61_653
+; CHECK-RV32-NEXT:  .LBB61_124: # %else462
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_125
+; CHECK-RV32-NEXT:    j .LBB61_654
+; CHECK-RV32-NEXT:  .LBB61_125: # %else466
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_126
+; CHECK-RV32-NEXT:    j .LBB61_655
+; CHECK-RV32-NEXT:  .LBB61_126: # %else470
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_127
+; CHECK-RV32-NEXT:    j .LBB61_656
+; CHECK-RV32-NEXT:  .LBB61_127: # %else474
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_128
+; CHECK-RV32-NEXT:    j .LBB61_657
+; CHECK-RV32-NEXT:  .LBB61_128: # %else478
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_129
+; CHECK-RV32-NEXT:    j .LBB61_658
+; CHECK-RV32-NEXT:  .LBB61_129: # %else482
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_130
+; CHECK-RV32-NEXT:    j .LBB61_659
+; CHECK-RV32-NEXT:  .LBB61_130: # %else486
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_131
+; CHECK-RV32-NEXT:    j .LBB61_660
+; CHECK-RV32-NEXT:  .LBB61_131: # %else490
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_132
+; CHECK-RV32-NEXT:    j .LBB61_661
+; CHECK-RV32-NEXT:  .LBB61_132: # %else494
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_134
+; CHECK-RV32-NEXT:  .LBB61_133: # %cond.load497
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 126
+; CHECK-RV32-NEXT:    li a4, 125
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:  .LBB61_134: # %else498
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_136
+; CHECK-RV32-NEXT:  # %bb.135: # %cond.load501
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 127
+; CHECK-RV32-NEXT:    li a4, 126
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_136: # %else502
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_137
+; CHECK-RV32-NEXT:    j .LBB61_662
+; CHECK-RV32-NEXT:  .LBB61_137: # %else506
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_138
+; CHECK-RV32-NEXT:    j .LBB61_663
+; CHECK-RV32-NEXT:  .LBB61_138: # %else510
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_139
+; CHECK-RV32-NEXT:    j .LBB61_664
+; CHECK-RV32-NEXT:  .LBB61_139: # %else514
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_140
+; CHECK-RV32-NEXT:    j .LBB61_665
+; CHECK-RV32-NEXT:  .LBB61_140: # %else518
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_141
+; CHECK-RV32-NEXT:    j .LBB61_666
+; CHECK-RV32-NEXT:  .LBB61_141: # %else522
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_142
+; CHECK-RV32-NEXT:    j .LBB61_667
+; CHECK-RV32-NEXT:  .LBB61_142: # %else526
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_143
+; CHECK-RV32-NEXT:    j .LBB61_668
+; CHECK-RV32-NEXT:  .LBB61_143: # %else530
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_144
+; CHECK-RV32-NEXT:    j .LBB61_669
+; CHECK-RV32-NEXT:  .LBB61_144: # %else534
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_145
+; CHECK-RV32-NEXT:    j .LBB61_670
+; CHECK-RV32-NEXT:  .LBB61_145: # %else538
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_146
+; CHECK-RV32-NEXT:    j .LBB61_671
+; CHECK-RV32-NEXT:  .LBB61_146: # %else542
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_147
+; CHECK-RV32-NEXT:    j .LBB61_672
+; CHECK-RV32-NEXT:  .LBB61_147: # %else546
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_148
+; CHECK-RV32-NEXT:    j .LBB61_673
+; CHECK-RV32-NEXT:  .LBB61_148: # %else550
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_149
+; CHECK-RV32-NEXT:    j .LBB61_674
+; CHECK-RV32-NEXT:  .LBB61_149: # %else554
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_150
+; CHECK-RV32-NEXT:    j .LBB61_675
+; CHECK-RV32-NEXT:  .LBB61_150: # %else558
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_151
+; CHECK-RV32-NEXT:    j .LBB61_676
+; CHECK-RV32-NEXT:  .LBB61_151: # %else562
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_152
+; CHECK-RV32-NEXT:    j .LBB61_677
+; CHECK-RV32-NEXT:  .LBB61_152: # %else566
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_153
+; CHECK-RV32-NEXT:    j .LBB61_678
+; CHECK-RV32-NEXT:  .LBB61_153: # %else570
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_154
+; CHECK-RV32-NEXT:    j .LBB61_679
+; CHECK-RV32-NEXT:  .LBB61_154: # %else574
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_155
+; CHECK-RV32-NEXT:    j .LBB61_680
+; CHECK-RV32-NEXT:  .LBB61_155: # %else578
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_156
+; CHECK-RV32-NEXT:    j .LBB61_681
+; CHECK-RV32-NEXT:  .LBB61_156: # %else582
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_157
+; CHECK-RV32-NEXT:    j .LBB61_682
+; CHECK-RV32-NEXT:  .LBB61_157: # %else586
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_158
+; CHECK-RV32-NEXT:    j .LBB61_683
+; CHECK-RV32-NEXT:  .LBB61_158: # %else590
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_159
+; CHECK-RV32-NEXT:    j .LBB61_684
+; CHECK-RV32-NEXT:  .LBB61_159: # %else594
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_160
+; CHECK-RV32-NEXT:    j .LBB61_685
+; CHECK-RV32-NEXT:  .LBB61_160: # %else598
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_161
+; CHECK-RV32-NEXT:    j .LBB61_686
+; CHECK-RV32-NEXT:  .LBB61_161: # %else602
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_162
+; CHECK-RV32-NEXT:    j .LBB61_687
+; CHECK-RV32-NEXT:  .LBB61_162: # %else606
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_163
+; CHECK-RV32-NEXT:    j .LBB61_688
+; CHECK-RV32-NEXT:  .LBB61_163: # %else610
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_164
+; CHECK-RV32-NEXT:    j .LBB61_689
+; CHECK-RV32-NEXT:  .LBB61_164: # %else614
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_165
+; CHECK-RV32-NEXT:    j .LBB61_690
+; CHECK-RV32-NEXT:  .LBB61_165: # %else618
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_166
+; CHECK-RV32-NEXT:    j .LBB61_691
+; CHECK-RV32-NEXT:  .LBB61_166: # %else622
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_168
+; CHECK-RV32-NEXT:  .LBB61_167: # %cond.load625
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 158
+; CHECK-RV32-NEXT:    li a4, 157
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_168: # %else626
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_170
+; CHECK-RV32-NEXT:  # %bb.169: # %cond.load629
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 159
+; CHECK-RV32-NEXT:    li a4, 158
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_170: # %else630
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_171
+; CHECK-RV32-NEXT:    j .LBB61_692
+; CHECK-RV32-NEXT:  .LBB61_171: # %else634
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_172
+; CHECK-RV32-NEXT:    j .LBB61_693
+; CHECK-RV32-NEXT:  .LBB61_172: # %else638
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_173
+; CHECK-RV32-NEXT:    j .LBB61_694
+; CHECK-RV32-NEXT:  .LBB61_173: # %else642
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_174
+; CHECK-RV32-NEXT:    j .LBB61_695
+; CHECK-RV32-NEXT:  .LBB61_174: # %else646
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_175
+; CHECK-RV32-NEXT:    j .LBB61_696
+; CHECK-RV32-NEXT:  .LBB61_175: # %else650
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_176
+; CHECK-RV32-NEXT:    j .LBB61_697
+; CHECK-RV32-NEXT:  .LBB61_176: # %else654
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_177
+; CHECK-RV32-NEXT:    j .LBB61_698
+; CHECK-RV32-NEXT:  .LBB61_177: # %else658
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_178
+; CHECK-RV32-NEXT:    j .LBB61_699
+; CHECK-RV32-NEXT:  .LBB61_178: # %else662
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_179
+; CHECK-RV32-NEXT:    j .LBB61_700
+; CHECK-RV32-NEXT:  .LBB61_179: # %else666
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_180
+; CHECK-RV32-NEXT:    j .LBB61_701
+; CHECK-RV32-NEXT:  .LBB61_180: # %else670
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_181
+; CHECK-RV32-NEXT:    j .LBB61_702
+; CHECK-RV32-NEXT:  .LBB61_181: # %else674
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_182
+; CHECK-RV32-NEXT:    j .LBB61_703
+; CHECK-RV32-NEXT:  .LBB61_182: # %else678
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_183
+; CHECK-RV32-NEXT:    j .LBB61_704
+; CHECK-RV32-NEXT:  .LBB61_183: # %else682
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_184
+; CHECK-RV32-NEXT:    j .LBB61_705
+; CHECK-RV32-NEXT:  .LBB61_184: # %else686
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_185
+; CHECK-RV32-NEXT:    j .LBB61_706
+; CHECK-RV32-NEXT:  .LBB61_185: # %else690
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_186
+; CHECK-RV32-NEXT:    j .LBB61_707
+; CHECK-RV32-NEXT:  .LBB61_186: # %else694
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_187
+; CHECK-RV32-NEXT:    j .LBB61_708
+; CHECK-RV32-NEXT:  .LBB61_187: # %else698
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_188
+; CHECK-RV32-NEXT:    j .LBB61_709
+; CHECK-RV32-NEXT:  .LBB61_188: # %else702
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_189
+; CHECK-RV32-NEXT:    j .LBB61_710
+; CHECK-RV32-NEXT:  .LBB61_189: # %else706
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_190
+; CHECK-RV32-NEXT:    j .LBB61_711
+; CHECK-RV32-NEXT:  .LBB61_190: # %else710
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_191
+; CHECK-RV32-NEXT:    j .LBB61_712
+; CHECK-RV32-NEXT:  .LBB61_191: # %else714
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_192
+; CHECK-RV32-NEXT:    j .LBB61_713
+; CHECK-RV32-NEXT:  .LBB61_192: # %else718
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_193
+; CHECK-RV32-NEXT:    j .LBB61_714
+; CHECK-RV32-NEXT:  .LBB61_193: # %else722
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_194
+; CHECK-RV32-NEXT:    j .LBB61_715
+; CHECK-RV32-NEXT:  .LBB61_194: # %else726
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_195
+; CHECK-RV32-NEXT:    j .LBB61_716
+; CHECK-RV32-NEXT:  .LBB61_195: # %else730
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_196
+; CHECK-RV32-NEXT:    j .LBB61_717
+; CHECK-RV32-NEXT:  .LBB61_196: # %else734
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_197
+; CHECK-RV32-NEXT:    j .LBB61_718
+; CHECK-RV32-NEXT:  .LBB61_197: # %else738
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_198
+; CHECK-RV32-NEXT:    j .LBB61_719
+; CHECK-RV32-NEXT:  .LBB61_198: # %else742
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_199
+; CHECK-RV32-NEXT:    j .LBB61_720
+; CHECK-RV32-NEXT:  .LBB61_199: # %else746
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_200
+; CHECK-RV32-NEXT:    j .LBB61_721
+; CHECK-RV32-NEXT:  .LBB61_200: # %else750
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_202
+; CHECK-RV32-NEXT:  .LBB61_201: # %cond.load753
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 190
+; CHECK-RV32-NEXT:    li a4, 189
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_202: # %else754
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_204
+; CHECK-RV32-NEXT:  # %bb.203: # %cond.load757
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 191
+; CHECK-RV32-NEXT:    li a4, 190
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_204: # %else758
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_205
+; CHECK-RV32-NEXT:    j .LBB61_722
+; CHECK-RV32-NEXT:  .LBB61_205: # %else762
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_206
+; CHECK-RV32-NEXT:    j .LBB61_723
+; CHECK-RV32-NEXT:  .LBB61_206: # %else766
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_207
+; CHECK-RV32-NEXT:    j .LBB61_724
+; CHECK-RV32-NEXT:  .LBB61_207: # %else770
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_208
+; CHECK-RV32-NEXT:    j .LBB61_725
+; CHECK-RV32-NEXT:  .LBB61_208: # %else774
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_209
+; CHECK-RV32-NEXT:    j .LBB61_726
+; CHECK-RV32-NEXT:  .LBB61_209: # %else778
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_210
+; CHECK-RV32-NEXT:    j .LBB61_727
+; CHECK-RV32-NEXT:  .LBB61_210: # %else782
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_211
+; CHECK-RV32-NEXT:    j .LBB61_728
+; CHECK-RV32-NEXT:  .LBB61_211: # %else786
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_212
+; CHECK-RV32-NEXT:    j .LBB61_729
+; CHECK-RV32-NEXT:  .LBB61_212: # %else790
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_213
+; CHECK-RV32-NEXT:    j .LBB61_730
+; CHECK-RV32-NEXT:  .LBB61_213: # %else794
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_214
+; CHECK-RV32-NEXT:    j .LBB61_731
+; CHECK-RV32-NEXT:  .LBB61_214: # %else798
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_215
+; CHECK-RV32-NEXT:    j .LBB61_732
+; CHECK-RV32-NEXT:  .LBB61_215: # %else802
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_216
+; CHECK-RV32-NEXT:    j .LBB61_733
+; CHECK-RV32-NEXT:  .LBB61_216: # %else806
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_217
+; CHECK-RV32-NEXT:    j .LBB61_734
+; CHECK-RV32-NEXT:  .LBB61_217: # %else810
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_218
+; CHECK-RV32-NEXT:    j .LBB61_735
+; CHECK-RV32-NEXT:  .LBB61_218: # %else814
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_219
+; CHECK-RV32-NEXT:    j .LBB61_736
+; CHECK-RV32-NEXT:  .LBB61_219: # %else818
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_220
+; CHECK-RV32-NEXT:    j .LBB61_737
+; CHECK-RV32-NEXT:  .LBB61_220: # %else822
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_221
+; CHECK-RV32-NEXT:    j .LBB61_738
+; CHECK-RV32-NEXT:  .LBB61_221: # %else826
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_222
+; CHECK-RV32-NEXT:    j .LBB61_739
+; CHECK-RV32-NEXT:  .LBB61_222: # %else830
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_223
+; CHECK-RV32-NEXT:    j .LBB61_740
+; CHECK-RV32-NEXT:  .LBB61_223: # %else834
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_224
+; CHECK-RV32-NEXT:    j .LBB61_741
+; CHECK-RV32-NEXT:  .LBB61_224: # %else838
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_225
+; CHECK-RV32-NEXT:    j .LBB61_742
+; CHECK-RV32-NEXT:  .LBB61_225: # %else842
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_226
+; CHECK-RV32-NEXT:    j .LBB61_743
+; CHECK-RV32-NEXT:  .LBB61_226: # %else846
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_227
+; CHECK-RV32-NEXT:    j .LBB61_744
+; CHECK-RV32-NEXT:  .LBB61_227: # %else850
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_228
+; CHECK-RV32-NEXT:    j .LBB61_745
+; CHECK-RV32-NEXT:  .LBB61_228: # %else854
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_229
+; CHECK-RV32-NEXT:    j .LBB61_746
+; CHECK-RV32-NEXT:  .LBB61_229: # %else858
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_230
+; CHECK-RV32-NEXT:    j .LBB61_747
+; CHECK-RV32-NEXT:  .LBB61_230: # %else862
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_231
+; CHECK-RV32-NEXT:    j .LBB61_748
+; CHECK-RV32-NEXT:  .LBB61_231: # %else866
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_232
+; CHECK-RV32-NEXT:    j .LBB61_749
+; CHECK-RV32-NEXT:  .LBB61_232: # %else870
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_233
+; CHECK-RV32-NEXT:    j .LBB61_750
+; CHECK-RV32-NEXT:  .LBB61_233: # %else874
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_234
+; CHECK-RV32-NEXT:    j .LBB61_751
+; CHECK-RV32-NEXT:  .LBB61_234: # %else878
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_236
+; CHECK-RV32-NEXT:  .LBB61_235: # %cond.load881
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 222
+; CHECK-RV32-NEXT:    li a4, 221
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_236: # %else882
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_238
+; CHECK-RV32-NEXT:  # %bb.237: # %cond.load885
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 223
+; CHECK-RV32-NEXT:    li a4, 222
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_238: # %else886
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_239
+; CHECK-RV32-NEXT:    j .LBB61_752
+; CHECK-RV32-NEXT:  .LBB61_239: # %else890
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_240
+; CHECK-RV32-NEXT:    j .LBB61_753
+; CHECK-RV32-NEXT:  .LBB61_240: # %else894
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_241
+; CHECK-RV32-NEXT:    j .LBB61_754
+; CHECK-RV32-NEXT:  .LBB61_241: # %else898
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_242
+; CHECK-RV32-NEXT:    j .LBB61_755
+; CHECK-RV32-NEXT:  .LBB61_242: # %else902
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_243
+; CHECK-RV32-NEXT:    j .LBB61_756
+; CHECK-RV32-NEXT:  .LBB61_243: # %else906
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_244
+; CHECK-RV32-NEXT:    j .LBB61_757
+; CHECK-RV32-NEXT:  .LBB61_244: # %else910
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_245
+; CHECK-RV32-NEXT:    j .LBB61_758
+; CHECK-RV32-NEXT:  .LBB61_245: # %else914
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_246
+; CHECK-RV32-NEXT:    j .LBB61_759
+; CHECK-RV32-NEXT:  .LBB61_246: # %else918
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_247
+; CHECK-RV32-NEXT:    j .LBB61_760
+; CHECK-RV32-NEXT:  .LBB61_247: # %else922
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_248
+; CHECK-RV32-NEXT:    j .LBB61_761
+; CHECK-RV32-NEXT:  .LBB61_248: # %else926
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_249
+; CHECK-RV32-NEXT:    j .LBB61_762
+; CHECK-RV32-NEXT:  .LBB61_249: # %else930
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_250
+; CHECK-RV32-NEXT:    j .LBB61_763
+; CHECK-RV32-NEXT:  .LBB61_250: # %else934
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_251
+; CHECK-RV32-NEXT:    j .LBB61_764
+; CHECK-RV32-NEXT:  .LBB61_251: # %else938
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_252
+; CHECK-RV32-NEXT:    j .LBB61_765
+; CHECK-RV32-NEXT:  .LBB61_252: # %else942
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_253
+; CHECK-RV32-NEXT:    j .LBB61_766
+; CHECK-RV32-NEXT:  .LBB61_253: # %else946
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_254
+; CHECK-RV32-NEXT:    j .LBB61_767
+; CHECK-RV32-NEXT:  .LBB61_254: # %else950
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_255
+; CHECK-RV32-NEXT:    j .LBB61_768
+; CHECK-RV32-NEXT:  .LBB61_255: # %else954
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_256
+; CHECK-RV32-NEXT:    j .LBB61_769
+; CHECK-RV32-NEXT:  .LBB61_256: # %else958
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_257
+; CHECK-RV32-NEXT:    j .LBB61_770
+; CHECK-RV32-NEXT:  .LBB61_257: # %else962
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_258
+; CHECK-RV32-NEXT:    j .LBB61_771
+; CHECK-RV32-NEXT:  .LBB61_258: # %else966
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_259
+; CHECK-RV32-NEXT:    j .LBB61_772
+; CHECK-RV32-NEXT:  .LBB61_259: # %else970
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_260
+; CHECK-RV32-NEXT:    j .LBB61_773
+; CHECK-RV32-NEXT:  .LBB61_260: # %else974
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_261
+; CHECK-RV32-NEXT:    j .LBB61_774
+; CHECK-RV32-NEXT:  .LBB61_261: # %else978
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_262
+; CHECK-RV32-NEXT:    j .LBB61_775
+; CHECK-RV32-NEXT:  .LBB61_262: # %else982
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_263
+; CHECK-RV32-NEXT:    j .LBB61_776
+; CHECK-RV32-NEXT:  .LBB61_263: # %else986
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_264
+; CHECK-RV32-NEXT:    j .LBB61_777
+; CHECK-RV32-NEXT:  .LBB61_264: # %else990
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_265
+; CHECK-RV32-NEXT:    j .LBB61_778
+; CHECK-RV32-NEXT:  .LBB61_265: # %else994
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_266
+; CHECK-RV32-NEXT:    j .LBB61_779
+; CHECK-RV32-NEXT:  .LBB61_266: # %else998
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_267
+; CHECK-RV32-NEXT:    j .LBB61_780
+; CHECK-RV32-NEXT:  .LBB61_267: # %else1002
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_268
+; CHECK-RV32-NEXT:    j .LBB61_781
+; CHECK-RV32-NEXT:  .LBB61_268: # %else1006
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_270
+; CHECK-RV32-NEXT:  .LBB61_269: # %cond.load1009
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 254
+; CHECK-RV32-NEXT:    li a4, 253
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_270: # %else1010
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_272
+; CHECK-RV32-NEXT:  # %bb.271: # %cond.load1013
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 255
+; CHECK-RV32-NEXT:    li a4, 254
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_272: # %else1014
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_273
+; CHECK-RV32-NEXT:    j .LBB61_782
+; CHECK-RV32-NEXT:  .LBB61_273: # %else1018
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_274
+; CHECK-RV32-NEXT:    j .LBB61_783
+; CHECK-RV32-NEXT:  .LBB61_274: # %else1022
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_275
+; CHECK-RV32-NEXT:    j .LBB61_784
+; CHECK-RV32-NEXT:  .LBB61_275: # %else1026
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_276
+; CHECK-RV32-NEXT:    j .LBB61_785
+; CHECK-RV32-NEXT:  .LBB61_276: # %else1030
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_277
+; CHECK-RV32-NEXT:    j .LBB61_786
+; CHECK-RV32-NEXT:  .LBB61_277: # %else1034
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_278
+; CHECK-RV32-NEXT:    j .LBB61_787
+; CHECK-RV32-NEXT:  .LBB61_278: # %else1038
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_279
+; CHECK-RV32-NEXT:    j .LBB61_788
+; CHECK-RV32-NEXT:  .LBB61_279: # %else1042
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_280
+; CHECK-RV32-NEXT:    j .LBB61_789
+; CHECK-RV32-NEXT:  .LBB61_280: # %else1046
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_281
+; CHECK-RV32-NEXT:    j .LBB61_790
+; CHECK-RV32-NEXT:  .LBB61_281: # %else1050
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_282
+; CHECK-RV32-NEXT:    j .LBB61_791
+; CHECK-RV32-NEXT:  .LBB61_282: # %else1054
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_283
+; CHECK-RV32-NEXT:    j .LBB61_792
+; CHECK-RV32-NEXT:  .LBB61_283: # %else1058
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_284
+; CHECK-RV32-NEXT:    j .LBB61_793
+; CHECK-RV32-NEXT:  .LBB61_284: # %else1062
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_285
+; CHECK-RV32-NEXT:    j .LBB61_794
+; CHECK-RV32-NEXT:  .LBB61_285: # %else1066
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_286
+; CHECK-RV32-NEXT:    j .LBB61_795
+; CHECK-RV32-NEXT:  .LBB61_286: # %else1070
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_287
+; CHECK-RV32-NEXT:    j .LBB61_796
+; CHECK-RV32-NEXT:  .LBB61_287: # %else1074
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_288
+; CHECK-RV32-NEXT:    j .LBB61_797
+; CHECK-RV32-NEXT:  .LBB61_288: # %else1078
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_289
+; CHECK-RV32-NEXT:    j .LBB61_798
+; CHECK-RV32-NEXT:  .LBB61_289: # %else1082
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_290
+; CHECK-RV32-NEXT:    j .LBB61_799
+; CHECK-RV32-NEXT:  .LBB61_290: # %else1086
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_291
+; CHECK-RV32-NEXT:    j .LBB61_800
+; CHECK-RV32-NEXT:  .LBB61_291: # %else1090
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_292
+; CHECK-RV32-NEXT:    j .LBB61_801
+; CHECK-RV32-NEXT:  .LBB61_292: # %else1094
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_293
+; CHECK-RV32-NEXT:    j .LBB61_802
+; CHECK-RV32-NEXT:  .LBB61_293: # %else1098
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_294
+; CHECK-RV32-NEXT:    j .LBB61_803
+; CHECK-RV32-NEXT:  .LBB61_294: # %else1102
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_295
+; CHECK-RV32-NEXT:    j .LBB61_804
+; CHECK-RV32-NEXT:  .LBB61_295: # %else1106
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_296
+; CHECK-RV32-NEXT:    j .LBB61_805
+; CHECK-RV32-NEXT:  .LBB61_296: # %else1110
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_297
+; CHECK-RV32-NEXT:    j .LBB61_806
+; CHECK-RV32-NEXT:  .LBB61_297: # %else1114
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_298
+; CHECK-RV32-NEXT:    j .LBB61_807
+; CHECK-RV32-NEXT:  .LBB61_298: # %else1118
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_299
+; CHECK-RV32-NEXT:    j .LBB61_808
+; CHECK-RV32-NEXT:  .LBB61_299: # %else1122
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_300
+; CHECK-RV32-NEXT:    j .LBB61_809
+; CHECK-RV32-NEXT:  .LBB61_300: # %else1126
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_301
+; CHECK-RV32-NEXT:    j .LBB61_810
+; CHECK-RV32-NEXT:  .LBB61_301: # %else1130
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_302
+; CHECK-RV32-NEXT:    j .LBB61_811
+; CHECK-RV32-NEXT:  .LBB61_302: # %else1134
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_304
+; CHECK-RV32-NEXT:  .LBB61_303: # %cond.load1137
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 286
+; CHECK-RV32-NEXT:    li a4, 285
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_304: # %else1138
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_306
+; CHECK-RV32-NEXT:  # %bb.305: # %cond.load1141
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 287
+; CHECK-RV32-NEXT:    li a4, 286
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_306: # %else1142
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_307
+; CHECK-RV32-NEXT:    j .LBB61_812
+; CHECK-RV32-NEXT:  .LBB61_307: # %else1146
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_308
+; CHECK-RV32-NEXT:    j .LBB61_813
+; CHECK-RV32-NEXT:  .LBB61_308: # %else1150
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_309
+; CHECK-RV32-NEXT:    j .LBB61_814
+; CHECK-RV32-NEXT:  .LBB61_309: # %else1154
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_310
+; CHECK-RV32-NEXT:    j .LBB61_815
+; CHECK-RV32-NEXT:  .LBB61_310: # %else1158
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_311
+; CHECK-RV32-NEXT:    j .LBB61_816
+; CHECK-RV32-NEXT:  .LBB61_311: # %else1162
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_312
+; CHECK-RV32-NEXT:    j .LBB61_817
+; CHECK-RV32-NEXT:  .LBB61_312: # %else1166
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_313
+; CHECK-RV32-NEXT:    j .LBB61_818
+; CHECK-RV32-NEXT:  .LBB61_313: # %else1170
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_314
+; CHECK-RV32-NEXT:    j .LBB61_819
+; CHECK-RV32-NEXT:  .LBB61_314: # %else1174
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_315
+; CHECK-RV32-NEXT:    j .LBB61_820
+; CHECK-RV32-NEXT:  .LBB61_315: # %else1178
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_316
+; CHECK-RV32-NEXT:    j .LBB61_821
+; CHECK-RV32-NEXT:  .LBB61_316: # %else1182
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_317
+; CHECK-RV32-NEXT:    j .LBB61_822
+; CHECK-RV32-NEXT:  .LBB61_317: # %else1186
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_318
+; CHECK-RV32-NEXT:    j .LBB61_823
+; CHECK-RV32-NEXT:  .LBB61_318: # %else1190
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_319
+; CHECK-RV32-NEXT:    j .LBB61_824
+; CHECK-RV32-NEXT:  .LBB61_319: # %else1194
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_320
+; CHECK-RV32-NEXT:    j .LBB61_825
+; CHECK-RV32-NEXT:  .LBB61_320: # %else1198
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_321
+; CHECK-RV32-NEXT:    j .LBB61_826
+; CHECK-RV32-NEXT:  .LBB61_321: # %else1202
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_322
+; CHECK-RV32-NEXT:    j .LBB61_827
+; CHECK-RV32-NEXT:  .LBB61_322: # %else1206
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_323
+; CHECK-RV32-NEXT:    j .LBB61_828
+; CHECK-RV32-NEXT:  .LBB61_323: # %else1210
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_324
+; CHECK-RV32-NEXT:    j .LBB61_829
+; CHECK-RV32-NEXT:  .LBB61_324: # %else1214
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_325
+; CHECK-RV32-NEXT:    j .LBB61_830
+; CHECK-RV32-NEXT:  .LBB61_325: # %else1218
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_326
+; CHECK-RV32-NEXT:    j .LBB61_831
+; CHECK-RV32-NEXT:  .LBB61_326: # %else1222
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_327
+; CHECK-RV32-NEXT:    j .LBB61_832
+; CHECK-RV32-NEXT:  .LBB61_327: # %else1226
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_328
+; CHECK-RV32-NEXT:    j .LBB61_833
+; CHECK-RV32-NEXT:  .LBB61_328: # %else1230
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_329
+; CHECK-RV32-NEXT:    j .LBB61_834
+; CHECK-RV32-NEXT:  .LBB61_329: # %else1234
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_330
+; CHECK-RV32-NEXT:    j .LBB61_835
+; CHECK-RV32-NEXT:  .LBB61_330: # %else1238
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_331
+; CHECK-RV32-NEXT:    j .LBB61_836
+; CHECK-RV32-NEXT:  .LBB61_331: # %else1242
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_332
+; CHECK-RV32-NEXT:    j .LBB61_837
+; CHECK-RV32-NEXT:  .LBB61_332: # %else1246
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_333
+; CHECK-RV32-NEXT:    j .LBB61_838
+; CHECK-RV32-NEXT:  .LBB61_333: # %else1250
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_334
+; CHECK-RV32-NEXT:    j .LBB61_839
+; CHECK-RV32-NEXT:  .LBB61_334: # %else1254
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_335
+; CHECK-RV32-NEXT:    j .LBB61_840
+; CHECK-RV32-NEXT:  .LBB61_335: # %else1258
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_336
+; CHECK-RV32-NEXT:    j .LBB61_841
+; CHECK-RV32-NEXT:  .LBB61_336: # %else1262
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_338
+; CHECK-RV32-NEXT:  .LBB61_337: # %cond.load1265
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 318
+; CHECK-RV32-NEXT:    li a4, 317
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_338: # %else1266
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_340
+; CHECK-RV32-NEXT:  # %bb.339: # %cond.load1269
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 319
+; CHECK-RV32-NEXT:    li a4, 318
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_340: # %else1270
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_341
+; CHECK-RV32-NEXT:    j .LBB61_842
+; CHECK-RV32-NEXT:  .LBB61_341: # %else1274
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_342
+; CHECK-RV32-NEXT:    j .LBB61_843
+; CHECK-RV32-NEXT:  .LBB61_342: # %else1278
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_343
+; CHECK-RV32-NEXT:    j .LBB61_844
+; CHECK-RV32-NEXT:  .LBB61_343: # %else1282
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_344
+; CHECK-RV32-NEXT:    j .LBB61_845
+; CHECK-RV32-NEXT:  .LBB61_344: # %else1286
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_345
+; CHECK-RV32-NEXT:    j .LBB61_846
+; CHECK-RV32-NEXT:  .LBB61_345: # %else1290
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_346
+; CHECK-RV32-NEXT:    j .LBB61_847
+; CHECK-RV32-NEXT:  .LBB61_346: # %else1294
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_347
+; CHECK-RV32-NEXT:    j .LBB61_848
+; CHECK-RV32-NEXT:  .LBB61_347: # %else1298
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_348
+; CHECK-RV32-NEXT:    j .LBB61_849
+; CHECK-RV32-NEXT:  .LBB61_348: # %else1302
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_349
+; CHECK-RV32-NEXT:    j .LBB61_850
+; CHECK-RV32-NEXT:  .LBB61_349: # %else1306
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_350
+; CHECK-RV32-NEXT:    j .LBB61_851
+; CHECK-RV32-NEXT:  .LBB61_350: # %else1310
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_351
+; CHECK-RV32-NEXT:    j .LBB61_852
+; CHECK-RV32-NEXT:  .LBB61_351: # %else1314
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_352
+; CHECK-RV32-NEXT:    j .LBB61_853
+; CHECK-RV32-NEXT:  .LBB61_352: # %else1318
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_353
+; CHECK-RV32-NEXT:    j .LBB61_854
+; CHECK-RV32-NEXT:  .LBB61_353: # %else1322
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_354
+; CHECK-RV32-NEXT:    j .LBB61_855
+; CHECK-RV32-NEXT:  .LBB61_354: # %else1326
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_355
+; CHECK-RV32-NEXT:    j .LBB61_856
+; CHECK-RV32-NEXT:  .LBB61_355: # %else1330
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_356
+; CHECK-RV32-NEXT:    j .LBB61_857
+; CHECK-RV32-NEXT:  .LBB61_356: # %else1334
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_357
+; CHECK-RV32-NEXT:    j .LBB61_858
+; CHECK-RV32-NEXT:  .LBB61_357: # %else1338
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_358
+; CHECK-RV32-NEXT:    j .LBB61_859
+; CHECK-RV32-NEXT:  .LBB61_358: # %else1342
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_359
+; CHECK-RV32-NEXT:    j .LBB61_860
+; CHECK-RV32-NEXT:  .LBB61_359: # %else1346
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_360
+; CHECK-RV32-NEXT:    j .LBB61_861
+; CHECK-RV32-NEXT:  .LBB61_360: # %else1350
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_361
+; CHECK-RV32-NEXT:    j .LBB61_862
+; CHECK-RV32-NEXT:  .LBB61_361: # %else1354
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_362
+; CHECK-RV32-NEXT:    j .LBB61_863
+; CHECK-RV32-NEXT:  .LBB61_362: # %else1358
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_363
+; CHECK-RV32-NEXT:    j .LBB61_864
+; CHECK-RV32-NEXT:  .LBB61_363: # %else1362
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_364
+; CHECK-RV32-NEXT:    j .LBB61_865
+; CHECK-RV32-NEXT:  .LBB61_364: # %else1366
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_365
+; CHECK-RV32-NEXT:    j .LBB61_866
+; CHECK-RV32-NEXT:  .LBB61_365: # %else1370
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_366
+; CHECK-RV32-NEXT:    j .LBB61_867
+; CHECK-RV32-NEXT:  .LBB61_366: # %else1374
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_367
+; CHECK-RV32-NEXT:    j .LBB61_868
+; CHECK-RV32-NEXT:  .LBB61_367: # %else1378
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_368
+; CHECK-RV32-NEXT:    j .LBB61_869
+; CHECK-RV32-NEXT:  .LBB61_368: # %else1382
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_369
+; CHECK-RV32-NEXT:    j .LBB61_870
+; CHECK-RV32-NEXT:  .LBB61_369: # %else1386
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_370
+; CHECK-RV32-NEXT:    j .LBB61_871
+; CHECK-RV32-NEXT:  .LBB61_370: # %else1390
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_372
+; CHECK-RV32-NEXT:  .LBB61_371: # %cond.load1393
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 350
+; CHECK-RV32-NEXT:    li a4, 349
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_372: # %else1394
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_374
+; CHECK-RV32-NEXT:  # %bb.373: # %cond.load1397
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 351
+; CHECK-RV32-NEXT:    li a4, 350
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_374: # %else1398
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_375
+; CHECK-RV32-NEXT:    j .LBB61_872
+; CHECK-RV32-NEXT:  .LBB61_375: # %else1402
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_376
+; CHECK-RV32-NEXT:    j .LBB61_873
+; CHECK-RV32-NEXT:  .LBB61_376: # %else1406
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_377
+; CHECK-RV32-NEXT:    j .LBB61_874
+; CHECK-RV32-NEXT:  .LBB61_377: # %else1410
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_378
+; CHECK-RV32-NEXT:    j .LBB61_875
+; CHECK-RV32-NEXT:  .LBB61_378: # %else1414
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_379
+; CHECK-RV32-NEXT:    j .LBB61_876
+; CHECK-RV32-NEXT:  .LBB61_379: # %else1418
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_380
+; CHECK-RV32-NEXT:    j .LBB61_877
+; CHECK-RV32-NEXT:  .LBB61_380: # %else1422
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_381
+; CHECK-RV32-NEXT:    j .LBB61_878
+; CHECK-RV32-NEXT:  .LBB61_381: # %else1426
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_382
+; CHECK-RV32-NEXT:    j .LBB61_879
+; CHECK-RV32-NEXT:  .LBB61_382: # %else1430
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_383
+; CHECK-RV32-NEXT:    j .LBB61_880
+; CHECK-RV32-NEXT:  .LBB61_383: # %else1434
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_384
+; CHECK-RV32-NEXT:    j .LBB61_881
+; CHECK-RV32-NEXT:  .LBB61_384: # %else1438
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_385
+; CHECK-RV32-NEXT:    j .LBB61_882
+; CHECK-RV32-NEXT:  .LBB61_385: # %else1442
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_386
+; CHECK-RV32-NEXT:    j .LBB61_883
+; CHECK-RV32-NEXT:  .LBB61_386: # %else1446
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_387
+; CHECK-RV32-NEXT:    j .LBB61_884
+; CHECK-RV32-NEXT:  .LBB61_387: # %else1450
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_388
+; CHECK-RV32-NEXT:    j .LBB61_885
+; CHECK-RV32-NEXT:  .LBB61_388: # %else1454
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_389
+; CHECK-RV32-NEXT:    j .LBB61_886
+; CHECK-RV32-NEXT:  .LBB61_389: # %else1458
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_390
+; CHECK-RV32-NEXT:    j .LBB61_887
+; CHECK-RV32-NEXT:  .LBB61_390: # %else1462
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_391
+; CHECK-RV32-NEXT:    j .LBB61_888
+; CHECK-RV32-NEXT:  .LBB61_391: # %else1466
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_392
+; CHECK-RV32-NEXT:    j .LBB61_889
+; CHECK-RV32-NEXT:  .LBB61_392: # %else1470
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_393
+; CHECK-RV32-NEXT:    j .LBB61_890
+; CHECK-RV32-NEXT:  .LBB61_393: # %else1474
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_394
+; CHECK-RV32-NEXT:    j .LBB61_891
+; CHECK-RV32-NEXT:  .LBB61_394: # %else1478
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_395
+; CHECK-RV32-NEXT:    j .LBB61_892
+; CHECK-RV32-NEXT:  .LBB61_395: # %else1482
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_396
+; CHECK-RV32-NEXT:    j .LBB61_893
+; CHECK-RV32-NEXT:  .LBB61_396: # %else1486
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_397
+; CHECK-RV32-NEXT:    j .LBB61_894
+; CHECK-RV32-NEXT:  .LBB61_397: # %else1490
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_398
+; CHECK-RV32-NEXT:    j .LBB61_895
+; CHECK-RV32-NEXT:  .LBB61_398: # %else1494
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_399
+; CHECK-RV32-NEXT:    j .LBB61_896
+; CHECK-RV32-NEXT:  .LBB61_399: # %else1498
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_400
+; CHECK-RV32-NEXT:    j .LBB61_897
+; CHECK-RV32-NEXT:  .LBB61_400: # %else1502
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_401
+; CHECK-RV32-NEXT:    j .LBB61_898
+; CHECK-RV32-NEXT:  .LBB61_401: # %else1506
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_402
+; CHECK-RV32-NEXT:    j .LBB61_899
+; CHECK-RV32-NEXT:  .LBB61_402: # %else1510
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_403
+; CHECK-RV32-NEXT:    j .LBB61_900
+; CHECK-RV32-NEXT:  .LBB61_403: # %else1514
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_404
+; CHECK-RV32-NEXT:    j .LBB61_901
+; CHECK-RV32-NEXT:  .LBB61_404: # %else1518
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_406
+; CHECK-RV32-NEXT:  .LBB61_405: # %cond.load1521
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 382
+; CHECK-RV32-NEXT:    li a4, 381
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_406: # %else1522
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_408
+; CHECK-RV32-NEXT:  # %bb.407: # %cond.load1525
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 383
+; CHECK-RV32-NEXT:    li a4, 382
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_408: # %else1526
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_409
+; CHECK-RV32-NEXT:    j .LBB61_902
+; CHECK-RV32-NEXT:  .LBB61_409: # %else1530
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_410
+; CHECK-RV32-NEXT:    j .LBB61_903
+; CHECK-RV32-NEXT:  .LBB61_410: # %else1534
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_411
+; CHECK-RV32-NEXT:    j .LBB61_904
+; CHECK-RV32-NEXT:  .LBB61_411: # %else1538
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_412
+; CHECK-RV32-NEXT:    j .LBB61_905
+; CHECK-RV32-NEXT:  .LBB61_412: # %else1542
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_413
+; CHECK-RV32-NEXT:    j .LBB61_906
+; CHECK-RV32-NEXT:  .LBB61_413: # %else1546
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_414
+; CHECK-RV32-NEXT:    j .LBB61_907
+; CHECK-RV32-NEXT:  .LBB61_414: # %else1550
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_415
+; CHECK-RV32-NEXT:    j .LBB61_908
+; CHECK-RV32-NEXT:  .LBB61_415: # %else1554
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_416
+; CHECK-RV32-NEXT:    j .LBB61_909
+; CHECK-RV32-NEXT:  .LBB61_416: # %else1558
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_417
+; CHECK-RV32-NEXT:    j .LBB61_910
+; CHECK-RV32-NEXT:  .LBB61_417: # %else1562
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_418
+; CHECK-RV32-NEXT:    j .LBB61_911
+; CHECK-RV32-NEXT:  .LBB61_418: # %else1566
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_419
+; CHECK-RV32-NEXT:    j .LBB61_912
+; CHECK-RV32-NEXT:  .LBB61_419: # %else1570
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_420
+; CHECK-RV32-NEXT:    j .LBB61_913
+; CHECK-RV32-NEXT:  .LBB61_420: # %else1574
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_421
+; CHECK-RV32-NEXT:    j .LBB61_914
+; CHECK-RV32-NEXT:  .LBB61_421: # %else1578
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_422
+; CHECK-RV32-NEXT:    j .LBB61_915
+; CHECK-RV32-NEXT:  .LBB61_422: # %else1582
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_423
+; CHECK-RV32-NEXT:    j .LBB61_916
+; CHECK-RV32-NEXT:  .LBB61_423: # %else1586
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_424
+; CHECK-RV32-NEXT:    j .LBB61_917
+; CHECK-RV32-NEXT:  .LBB61_424: # %else1590
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_425
+; CHECK-RV32-NEXT:    j .LBB61_918
+; CHECK-RV32-NEXT:  .LBB61_425: # %else1594
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_426
+; CHECK-RV32-NEXT:    j .LBB61_919
+; CHECK-RV32-NEXT:  .LBB61_426: # %else1598
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_427
+; CHECK-RV32-NEXT:    j .LBB61_920
+; CHECK-RV32-NEXT:  .LBB61_427: # %else1602
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_428
+; CHECK-RV32-NEXT:    j .LBB61_921
+; CHECK-RV32-NEXT:  .LBB61_428: # %else1606
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_429
+; CHECK-RV32-NEXT:    j .LBB61_922
+; CHECK-RV32-NEXT:  .LBB61_429: # %else1610
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_430
+; CHECK-RV32-NEXT:    j .LBB61_923
+; CHECK-RV32-NEXT:  .LBB61_430: # %else1614
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_431
+; CHECK-RV32-NEXT:    j .LBB61_924
+; CHECK-RV32-NEXT:  .LBB61_431: # %else1618
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_432
+; CHECK-RV32-NEXT:    j .LBB61_925
+; CHECK-RV32-NEXT:  .LBB61_432: # %else1622
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_433
+; CHECK-RV32-NEXT:    j .LBB61_926
+; CHECK-RV32-NEXT:  .LBB61_433: # %else1626
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_434
+; CHECK-RV32-NEXT:    j .LBB61_927
+; CHECK-RV32-NEXT:  .LBB61_434: # %else1630
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_435
+; CHECK-RV32-NEXT:    j .LBB61_928
+; CHECK-RV32-NEXT:  .LBB61_435: # %else1634
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_436
+; CHECK-RV32-NEXT:    j .LBB61_929
+; CHECK-RV32-NEXT:  .LBB61_436: # %else1638
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_437
+; CHECK-RV32-NEXT:    j .LBB61_930
+; CHECK-RV32-NEXT:  .LBB61_437: # %else1642
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_438
+; CHECK-RV32-NEXT:    j .LBB61_931
+; CHECK-RV32-NEXT:  .LBB61_438: # %else1646
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_440
+; CHECK-RV32-NEXT:  .LBB61_439: # %cond.load1649
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 414
+; CHECK-RV32-NEXT:    li a4, 413
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_440: # %else1650
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_442
+; CHECK-RV32-NEXT:  # %bb.441: # %cond.load1653
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 415
+; CHECK-RV32-NEXT:    li a4, 414
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_442: # %else1654
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_443
+; CHECK-RV32-NEXT:    j .LBB61_932
+; CHECK-RV32-NEXT:  .LBB61_443: # %else1658
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_444
+; CHECK-RV32-NEXT:    j .LBB61_933
+; CHECK-RV32-NEXT:  .LBB61_444: # %else1662
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_445
+; CHECK-RV32-NEXT:    j .LBB61_934
+; CHECK-RV32-NEXT:  .LBB61_445: # %else1666
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_446
+; CHECK-RV32-NEXT:    j .LBB61_935
+; CHECK-RV32-NEXT:  .LBB61_446: # %else1670
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_447
+; CHECK-RV32-NEXT:    j .LBB61_936
+; CHECK-RV32-NEXT:  .LBB61_447: # %else1674
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_448
+; CHECK-RV32-NEXT:    j .LBB61_937
+; CHECK-RV32-NEXT:  .LBB61_448: # %else1678
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_449
+; CHECK-RV32-NEXT:    j .LBB61_938
+; CHECK-RV32-NEXT:  .LBB61_449: # %else1682
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_450
+; CHECK-RV32-NEXT:    j .LBB61_939
+; CHECK-RV32-NEXT:  .LBB61_450: # %else1686
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_451
+; CHECK-RV32-NEXT:    j .LBB61_940
+; CHECK-RV32-NEXT:  .LBB61_451: # %else1690
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_452
+; CHECK-RV32-NEXT:    j .LBB61_941
+; CHECK-RV32-NEXT:  .LBB61_452: # %else1694
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_453
+; CHECK-RV32-NEXT:    j .LBB61_942
+; CHECK-RV32-NEXT:  .LBB61_453: # %else1698
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_454
+; CHECK-RV32-NEXT:    j .LBB61_943
+; CHECK-RV32-NEXT:  .LBB61_454: # %else1702
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_455
+; CHECK-RV32-NEXT:    j .LBB61_944
+; CHECK-RV32-NEXT:  .LBB61_455: # %else1706
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_456
+; CHECK-RV32-NEXT:    j .LBB61_945
+; CHECK-RV32-NEXT:  .LBB61_456: # %else1710
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_457
+; CHECK-RV32-NEXT:    j .LBB61_946
+; CHECK-RV32-NEXT:  .LBB61_457: # %else1714
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_458
+; CHECK-RV32-NEXT:    j .LBB61_947
+; CHECK-RV32-NEXT:  .LBB61_458: # %else1718
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_459
+; CHECK-RV32-NEXT:    j .LBB61_948
+; CHECK-RV32-NEXT:  .LBB61_459: # %else1722
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_460
+; CHECK-RV32-NEXT:    j .LBB61_949
+; CHECK-RV32-NEXT:  .LBB61_460: # %else1726
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_461
+; CHECK-RV32-NEXT:    j .LBB61_950
+; CHECK-RV32-NEXT:  .LBB61_461: # %else1730
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_462
+; CHECK-RV32-NEXT:    j .LBB61_951
+; CHECK-RV32-NEXT:  .LBB61_462: # %else1734
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_463
+; CHECK-RV32-NEXT:    j .LBB61_952
+; CHECK-RV32-NEXT:  .LBB61_463: # %else1738
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_464
+; CHECK-RV32-NEXT:    j .LBB61_953
+; CHECK-RV32-NEXT:  .LBB61_464: # %else1742
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_465
+; CHECK-RV32-NEXT:    j .LBB61_954
+; CHECK-RV32-NEXT:  .LBB61_465: # %else1746
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_466
+; CHECK-RV32-NEXT:    j .LBB61_955
+; CHECK-RV32-NEXT:  .LBB61_466: # %else1750
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_467
+; CHECK-RV32-NEXT:    j .LBB61_956
+; CHECK-RV32-NEXT:  .LBB61_467: # %else1754
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_468
+; CHECK-RV32-NEXT:    j .LBB61_957
+; CHECK-RV32-NEXT:  .LBB61_468: # %else1758
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_469
+; CHECK-RV32-NEXT:    j .LBB61_958
+; CHECK-RV32-NEXT:  .LBB61_469: # %else1762
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_470
+; CHECK-RV32-NEXT:    j .LBB61_959
+; CHECK-RV32-NEXT:  .LBB61_470: # %else1766
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_471
+; CHECK-RV32-NEXT:    j .LBB61_960
+; CHECK-RV32-NEXT:  .LBB61_471: # %else1770
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_472
+; CHECK-RV32-NEXT:    j .LBB61_961
+; CHECK-RV32-NEXT:  .LBB61_472: # %else1774
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_474
+; CHECK-RV32-NEXT:  .LBB61_473: # %cond.load1777
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 446
+; CHECK-RV32-NEXT:    li a4, 445
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_474: # %else1778
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_476
+; CHECK-RV32-NEXT:  # %bb.475: # %cond.load1781
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 447
+; CHECK-RV32-NEXT:    li a4, 446
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_476: # %else1782
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_477
+; CHECK-RV32-NEXT:    j .LBB61_962
+; CHECK-RV32-NEXT:  .LBB61_477: # %else1786
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_478
+; CHECK-RV32-NEXT:    j .LBB61_963
+; CHECK-RV32-NEXT:  .LBB61_478: # %else1790
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_479
+; CHECK-RV32-NEXT:    j .LBB61_964
+; CHECK-RV32-NEXT:  .LBB61_479: # %else1794
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_480
+; CHECK-RV32-NEXT:    j .LBB61_965
+; CHECK-RV32-NEXT:  .LBB61_480: # %else1798
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_481
+; CHECK-RV32-NEXT:    j .LBB61_966
+; CHECK-RV32-NEXT:  .LBB61_481: # %else1802
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_482
+; CHECK-RV32-NEXT:    j .LBB61_967
+; CHECK-RV32-NEXT:  .LBB61_482: # %else1806
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_483
+; CHECK-RV32-NEXT:    j .LBB61_968
+; CHECK-RV32-NEXT:  .LBB61_483: # %else1810
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_484
+; CHECK-RV32-NEXT:    j .LBB61_969
+; CHECK-RV32-NEXT:  .LBB61_484: # %else1814
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_485
+; CHECK-RV32-NEXT:    j .LBB61_970
+; CHECK-RV32-NEXT:  .LBB61_485: # %else1818
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_486
+; CHECK-RV32-NEXT:    j .LBB61_971
+; CHECK-RV32-NEXT:  .LBB61_486: # %else1822
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_487
+; CHECK-RV32-NEXT:    j .LBB61_972
+; CHECK-RV32-NEXT:  .LBB61_487: # %else1826
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_488
+; CHECK-RV32-NEXT:    j .LBB61_973
+; CHECK-RV32-NEXT:  .LBB61_488: # %else1830
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_489
+; CHECK-RV32-NEXT:    j .LBB61_974
+; CHECK-RV32-NEXT:  .LBB61_489: # %else1834
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_490
+; CHECK-RV32-NEXT:    j .LBB61_975
+; CHECK-RV32-NEXT:  .LBB61_490: # %else1838
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_491
+; CHECK-RV32-NEXT:    j .LBB61_976
+; CHECK-RV32-NEXT:  .LBB61_491: # %else1842
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_492
+; CHECK-RV32-NEXT:    j .LBB61_977
+; CHECK-RV32-NEXT:  .LBB61_492: # %else1846
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_493
+; CHECK-RV32-NEXT:    j .LBB61_978
+; CHECK-RV32-NEXT:  .LBB61_493: # %else1850
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_494
+; CHECK-RV32-NEXT:    j .LBB61_979
+; CHECK-RV32-NEXT:  .LBB61_494: # %else1854
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_495
+; CHECK-RV32-NEXT:    j .LBB61_980
+; CHECK-RV32-NEXT:  .LBB61_495: # %else1858
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_496
+; CHECK-RV32-NEXT:    j .LBB61_981
+; CHECK-RV32-NEXT:  .LBB61_496: # %else1862
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_497
+; CHECK-RV32-NEXT:    j .LBB61_982
+; CHECK-RV32-NEXT:  .LBB61_497: # %else1866
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_498
+; CHECK-RV32-NEXT:    j .LBB61_983
+; CHECK-RV32-NEXT:  .LBB61_498: # %else1870
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_499
+; CHECK-RV32-NEXT:    j .LBB61_984
+; CHECK-RV32-NEXT:  .LBB61_499: # %else1874
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_500
+; CHECK-RV32-NEXT:    j .LBB61_985
+; CHECK-RV32-NEXT:  .LBB61_500: # %else1878
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_501
+; CHECK-RV32-NEXT:    j .LBB61_986
+; CHECK-RV32-NEXT:  .LBB61_501: # %else1882
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_502
+; CHECK-RV32-NEXT:    j .LBB61_987
+; CHECK-RV32-NEXT:  .LBB61_502: # %else1886
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_503
+; CHECK-RV32-NEXT:    j .LBB61_988
+; CHECK-RV32-NEXT:  .LBB61_503: # %else1890
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_504
+; CHECK-RV32-NEXT:    j .LBB61_989
+; CHECK-RV32-NEXT:  .LBB61_504: # %else1894
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_505
+; CHECK-RV32-NEXT:    j .LBB61_990
+; CHECK-RV32-NEXT:  .LBB61_505: # %else1898
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_506
+; CHECK-RV32-NEXT:    j .LBB61_991
+; CHECK-RV32-NEXT:  .LBB61_506: # %else1902
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_508
+; CHECK-RV32-NEXT:  .LBB61_507: # %cond.load1905
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 478
+; CHECK-RV32-NEXT:    li a4, 477
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_508: # %else1906
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_510
+; CHECK-RV32-NEXT:  # %bb.509: # %cond.load1909
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a1
+; CHECK-RV32-NEXT:    li a1, 479
+; CHECK-RV32-NEXT:    li a2, 478
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a2
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_510: # %else1910
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_511
+; CHECK-RV32-NEXT:    j .LBB61_992
+; CHECK-RV32-NEXT:  .LBB61_511: # %else1914
+; CHECK-RV32-NEXT:    andi a2, a1, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_512
+; CHECK-RV32-NEXT:    j .LBB61_993
+; CHECK-RV32-NEXT:  .LBB61_512: # %else1918
+; CHECK-RV32-NEXT:    andi a2, a1, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_513
+; CHECK-RV32-NEXT:    j .LBB61_994
+; CHECK-RV32-NEXT:  .LBB61_513: # %else1922
+; CHECK-RV32-NEXT:    andi a2, a1, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_514
+; CHECK-RV32-NEXT:    j .LBB61_995
+; CHECK-RV32-NEXT:  .LBB61_514: # %else1926
+; CHECK-RV32-NEXT:    andi a2, a1, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_515
+; CHECK-RV32-NEXT:    j .LBB61_996
+; CHECK-RV32-NEXT:  .LBB61_515: # %else1930
+; CHECK-RV32-NEXT:    andi a2, a1, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_516
+; CHECK-RV32-NEXT:    j .LBB61_997
+; CHECK-RV32-NEXT:  .LBB61_516: # %else1934
+; CHECK-RV32-NEXT:    andi a2, a1, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_517
+; CHECK-RV32-NEXT:    j .LBB61_998
+; CHECK-RV32-NEXT:  .LBB61_517: # %else1938
+; CHECK-RV32-NEXT:    andi a2, a1, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_518
+; CHECK-RV32-NEXT:    j .LBB61_999
+; CHECK-RV32-NEXT:  .LBB61_518: # %else1942
+; CHECK-RV32-NEXT:    andi a2, a1, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_519
+; CHECK-RV32-NEXT:    j .LBB61_1000
+; CHECK-RV32-NEXT:  .LBB61_519: # %else1946
+; CHECK-RV32-NEXT:    andi a2, a1, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_520
+; CHECK-RV32-NEXT:    j .LBB61_1001
+; CHECK-RV32-NEXT:  .LBB61_520: # %else1950
+; CHECK-RV32-NEXT:    andi a2, a1, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_521
+; CHECK-RV32-NEXT:    j .LBB61_1002
+; CHECK-RV32-NEXT:  .LBB61_521: # %else1954
+; CHECK-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_522
+; CHECK-RV32-NEXT:    j .LBB61_1003
+; CHECK-RV32-NEXT:  .LBB61_522: # %else1958
+; CHECK-RV32-NEXT:    slli a2, a1, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_523
+; CHECK-RV32-NEXT:    j .LBB61_1004
+; CHECK-RV32-NEXT:  .LBB61_523: # %else1962
+; CHECK-RV32-NEXT:    slli a2, a1, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_524
+; CHECK-RV32-NEXT:    j .LBB61_1005
+; CHECK-RV32-NEXT:  .LBB61_524: # %else1966
+; CHECK-RV32-NEXT:    slli a2, a1, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_525
+; CHECK-RV32-NEXT:    j .LBB61_1006
+; CHECK-RV32-NEXT:  .LBB61_525: # %else1970
+; CHECK-RV32-NEXT:    slli a2, a1, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_526
+; CHECK-RV32-NEXT:    j .LBB61_1007
+; CHECK-RV32-NEXT:  .LBB61_526: # %else1974
+; CHECK-RV32-NEXT:    slli a2, a1, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_527
+; CHECK-RV32-NEXT:    j .LBB61_1008
+; CHECK-RV32-NEXT:  .LBB61_527: # %else1978
+; CHECK-RV32-NEXT:    slli a2, a1, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_528
+; CHECK-RV32-NEXT:    j .LBB61_1009
+; CHECK-RV32-NEXT:  .LBB61_528: # %else1982
+; CHECK-RV32-NEXT:    slli a2, a1, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_529
+; CHECK-RV32-NEXT:    j .LBB61_1010
+; CHECK-RV32-NEXT:  .LBB61_529: # %else1986
+; CHECK-RV32-NEXT:    slli a2, a1, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_530
+; CHECK-RV32-NEXT:    j .LBB61_1011
+; CHECK-RV32-NEXT:  .LBB61_530: # %else1990
+; CHECK-RV32-NEXT:    slli a2, a1, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_531
+; CHECK-RV32-NEXT:    j .LBB61_1012
+; CHECK-RV32-NEXT:  .LBB61_531: # %else1994
+; CHECK-RV32-NEXT:    slli a2, a1, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_532
+; CHECK-RV32-NEXT:    j .LBB61_1013
+; CHECK-RV32-NEXT:  .LBB61_532: # %else1998
+; CHECK-RV32-NEXT:    slli a2, a1, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_533
+; CHECK-RV32-NEXT:    j .LBB61_1014
+; CHECK-RV32-NEXT:  .LBB61_533: # %else2002
+; CHECK-RV32-NEXT:    slli a2, a1, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_534
+; CHECK-RV32-NEXT:    j .LBB61_1015
+; CHECK-RV32-NEXT:  .LBB61_534: # %else2006
+; CHECK-RV32-NEXT:    slli a2, a1, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_535
+; CHECK-RV32-NEXT:    j .LBB61_1016
+; CHECK-RV32-NEXT:  .LBB61_535: # %else2010
+; CHECK-RV32-NEXT:    slli a2, a1, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_536
+; CHECK-RV32-NEXT:    j .LBB61_1017
+; CHECK-RV32-NEXT:  .LBB61_536: # %else2014
+; CHECK-RV32-NEXT:    slli a2, a1, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_537
+; CHECK-RV32-NEXT:    j .LBB61_1018
+; CHECK-RV32-NEXT:  .LBB61_537: # %else2018
+; CHECK-RV32-NEXT:    slli a2, a1, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_538
+; CHECK-RV32-NEXT:    j .LBB61_1019
+; CHECK-RV32-NEXT:  .LBB61_538: # %else2022
+; CHECK-RV32-NEXT:    slli a2, a1, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_539
+; CHECK-RV32-NEXT:    j .LBB61_1020
+; CHECK-RV32-NEXT:  .LBB61_539: # %else2026
+; CHECK-RV32-NEXT:    slli a2, a1, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_540
+; CHECK-RV32-NEXT:    j .LBB61_1021
+; CHECK-RV32-NEXT:  .LBB61_540: # %else2030
+; CHECK-RV32-NEXT:    slli a2, a1, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_541
+; CHECK-RV32-NEXT:    j .LBB61_1022
+; CHECK-RV32-NEXT:  .LBB61_541: # %else2034
+; CHECK-RV32-NEXT:    slli a2, a1, 1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_542
+; CHECK-RV32-NEXT:    j .LBB61_1023
+; CHECK-RV32-NEXT:  .LBB61_542: # %else2038
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_543
+; CHECK-RV32-NEXT:    j .LBB61_1024
+; CHECK-RV32-NEXT:  .LBB61_543: # %else2042
+; CHECK-RV32-NEXT:    ret
+; CHECK-RV32-NEXT:  .LBB61_544: # %cond.load
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v8, a1
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a1, a3, 2
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_545
+; CHECK-RV32-NEXT:    j .LBB61_2
+; CHECK-RV32-NEXT:  .LBB61_545: # %cond.load1
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 4
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_546
+; CHECK-RV32-NEXT:    j .LBB61_3
+; CHECK-RV32-NEXT:  .LBB61_546: # %cond.load5
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 8
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_547
+; CHECK-RV32-NEXT:    j .LBB61_4
+; CHECK-RV32-NEXT:  .LBB61_547: # %cond.load9
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 16
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_548
+; CHECK-RV32-NEXT:    j .LBB61_5
+; CHECK-RV32-NEXT:  .LBB61_548: # %cond.load13
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 32
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_549
+; CHECK-RV32-NEXT:    j .LBB61_6
+; CHECK-RV32-NEXT:  .LBB61_549: # %cond.load17
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 64
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_550
+; CHECK-RV32-NEXT:    j .LBB61_7
+; CHECK-RV32-NEXT:  .LBB61_550: # %cond.load21
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 128
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_551
+; CHECK-RV32-NEXT:    j .LBB61_8
+; CHECK-RV32-NEXT:  .LBB61_551: # %cond.load25
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 256
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_552
+; CHECK-RV32-NEXT:    j .LBB61_9
+; CHECK-RV32-NEXT:  .LBB61_552: # %cond.load29
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 512
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_553
+; CHECK-RV32-NEXT:    j .LBB61_10
+; CHECK-RV32-NEXT:  .LBB61_553: # %cond.load33
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_554
+; CHECK-RV32-NEXT:    j .LBB61_11
+; CHECK-RV32-NEXT:  .LBB61_554: # %cond.load37
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 20
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_555
+; CHECK-RV32-NEXT:    j .LBB61_12
+; CHECK-RV32-NEXT:  .LBB61_555: # %cond.load41
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 19
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_556
+; CHECK-RV32-NEXT:    j .LBB61_13
+; CHECK-RV32-NEXT:  .LBB61_556: # %cond.load45
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 18
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_557
+; CHECK-RV32-NEXT:    j .LBB61_14
+; CHECK-RV32-NEXT:  .LBB61_557: # %cond.load49
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 17
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_558
+; CHECK-RV32-NEXT:    j .LBB61_15
+; CHECK-RV32-NEXT:  .LBB61_558: # %cond.load53
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 16
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_559
+; CHECK-RV32-NEXT:    j .LBB61_16
+; CHECK-RV32-NEXT:  .LBB61_559: # %cond.load57
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 15
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_560
+; CHECK-RV32-NEXT:    j .LBB61_17
+; CHECK-RV32-NEXT:  .LBB61_560: # %cond.load61
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 14
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_561
+; CHECK-RV32-NEXT:    j .LBB61_18
+; CHECK-RV32-NEXT:  .LBB61_561: # %cond.load65
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 13
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_562
+; CHECK-RV32-NEXT:    j .LBB61_19
+; CHECK-RV32-NEXT:  .LBB61_562: # %cond.load69
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 12
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_563
+; CHECK-RV32-NEXT:    j .LBB61_20
+; CHECK-RV32-NEXT:  .LBB61_563: # %cond.load73
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 11
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_564
+; CHECK-RV32-NEXT:    j .LBB61_21
+; CHECK-RV32-NEXT:  .LBB61_564: # %cond.load77
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 10
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_565
+; CHECK-RV32-NEXT:    j .LBB61_22
+; CHECK-RV32-NEXT:  .LBB61_565: # %cond.load81
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 9
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_566
+; CHECK-RV32-NEXT:    j .LBB61_23
+; CHECK-RV32-NEXT:  .LBB61_566: # %cond.load85
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 8
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_567
+; CHECK-RV32-NEXT:    j .LBB61_24
+; CHECK-RV32-NEXT:  .LBB61_567: # %cond.load89
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 7
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_568
+; CHECK-RV32-NEXT:    j .LBB61_25
+; CHECK-RV32-NEXT:  .LBB61_568: # %cond.load93
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 6
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_569
+; CHECK-RV32-NEXT:    j .LBB61_26
+; CHECK-RV32-NEXT:  .LBB61_569: # %cond.load97
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 5
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_570
+; CHECK-RV32-NEXT:    j .LBB61_27
+; CHECK-RV32-NEXT:  .LBB61_570: # %cond.load101
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 4
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_571
+; CHECK-RV32-NEXT:    j .LBB61_28
+; CHECK-RV32-NEXT:  .LBB61_571: # %cond.load105
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 3
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_1025
+; CHECK-RV32-NEXT:    j .LBB61_29
+; CHECK-RV32-NEXT:  .LBB61_1025: # %cond.load105
+; CHECK-RV32-NEXT:    j .LBB61_30
+; CHECK-RV32-NEXT:  .LBB61_572: # %cond.load121
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_573
+; CHECK-RV32-NEXT:    j .LBB61_36
+; CHECK-RV32-NEXT:  .LBB61_573: # %cond.load125
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 33
+; CHECK-RV32-NEXT:    li a4, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_574
+; CHECK-RV32-NEXT:    j .LBB61_37
+; CHECK-RV32-NEXT:  .LBB61_574: # %cond.load129
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 34
+; CHECK-RV32-NEXT:    li a4, 33
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_575
+; CHECK-RV32-NEXT:    j .LBB61_38
+; CHECK-RV32-NEXT:  .LBB61_575: # %cond.load133
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 35
+; CHECK-RV32-NEXT:    li a4, 34
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_576
+; CHECK-RV32-NEXT:    j .LBB61_39
+; CHECK-RV32-NEXT:  .LBB61_576: # %cond.load137
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 36
+; CHECK-RV32-NEXT:    li a4, 35
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_577
+; CHECK-RV32-NEXT:    j .LBB61_40
+; CHECK-RV32-NEXT:  .LBB61_577: # %cond.load141
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 37
+; CHECK-RV32-NEXT:    li a4, 36
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_578
+; CHECK-RV32-NEXT:    j .LBB61_41
+; CHECK-RV32-NEXT:  .LBB61_578: # %cond.load145
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 38
+; CHECK-RV32-NEXT:    li a4, 37
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_579
+; CHECK-RV32-NEXT:    j .LBB61_42
+; CHECK-RV32-NEXT:  .LBB61_579: # %cond.load149
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 39
+; CHECK-RV32-NEXT:    li a4, 38
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_580
+; CHECK-RV32-NEXT:    j .LBB61_43
+; CHECK-RV32-NEXT:  .LBB61_580: # %cond.load153
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 40
+; CHECK-RV32-NEXT:    li a4, 39
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_581
+; CHECK-RV32-NEXT:    j .LBB61_44
+; CHECK-RV32-NEXT:  .LBB61_581: # %cond.load157
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 41
+; CHECK-RV32-NEXT:    li a4, 40
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_582
+; CHECK-RV32-NEXT:    j .LBB61_45
+; CHECK-RV32-NEXT:  .LBB61_582: # %cond.load161
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 42
+; CHECK-RV32-NEXT:    li a4, 41
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_583
+; CHECK-RV32-NEXT:    j .LBB61_46
+; CHECK-RV32-NEXT:  .LBB61_583: # %cond.load165
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 43
+; CHECK-RV32-NEXT:    li a4, 42
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_584
+; CHECK-RV32-NEXT:    j .LBB61_47
+; CHECK-RV32-NEXT:  .LBB61_584: # %cond.load169
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 44
+; CHECK-RV32-NEXT:    li a4, 43
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_585
+; CHECK-RV32-NEXT:    j .LBB61_48
+; CHECK-RV32-NEXT:  .LBB61_585: # %cond.load173
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 45
+; CHECK-RV32-NEXT:    li a4, 44
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_586
+; CHECK-RV32-NEXT:    j .LBB61_49
+; CHECK-RV32-NEXT:  .LBB61_586: # %cond.load177
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 46
+; CHECK-RV32-NEXT:    li a4, 45
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_587
+; CHECK-RV32-NEXT:    j .LBB61_50
+; CHECK-RV32-NEXT:  .LBB61_587: # %cond.load181
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 47
+; CHECK-RV32-NEXT:    li a4, 46
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_588
+; CHECK-RV32-NEXT:    j .LBB61_51
+; CHECK-RV32-NEXT:  .LBB61_588: # %cond.load185
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 48
+; CHECK-RV32-NEXT:    li a4, 47
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_589
+; CHECK-RV32-NEXT:    j .LBB61_52
+; CHECK-RV32-NEXT:  .LBB61_589: # %cond.load189
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 49
+; CHECK-RV32-NEXT:    li a4, 48
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_590
+; CHECK-RV32-NEXT:    j .LBB61_53
+; CHECK-RV32-NEXT:  .LBB61_590: # %cond.load193
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 50
+; CHECK-RV32-NEXT:    li a4, 49
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_591
+; CHECK-RV32-NEXT:    j .LBB61_54
+; CHECK-RV32-NEXT:  .LBB61_591: # %cond.load197
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 51
+; CHECK-RV32-NEXT:    li a4, 50
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_592
+; CHECK-RV32-NEXT:    j .LBB61_55
+; CHECK-RV32-NEXT:  .LBB61_592: # %cond.load201
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 52
+; CHECK-RV32-NEXT:    li a4, 51
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_593
+; CHECK-RV32-NEXT:    j .LBB61_56
+; CHECK-RV32-NEXT:  .LBB61_593: # %cond.load205
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 53
+; CHECK-RV32-NEXT:    li a4, 52
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_594
+; CHECK-RV32-NEXT:    j .LBB61_57
+; CHECK-RV32-NEXT:  .LBB61_594: # %cond.load209
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 54
+; CHECK-RV32-NEXT:    li a4, 53
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_595
+; CHECK-RV32-NEXT:    j .LBB61_58
+; CHECK-RV32-NEXT:  .LBB61_595: # %cond.load213
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 55
+; CHECK-RV32-NEXT:    li a4, 54
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_596
+; CHECK-RV32-NEXT:    j .LBB61_59
+; CHECK-RV32-NEXT:  .LBB61_596: # %cond.load217
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 56
+; CHECK-RV32-NEXT:    li a4, 55
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_597
+; CHECK-RV32-NEXT:    j .LBB61_60
+; CHECK-RV32-NEXT:  .LBB61_597: # %cond.load221
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 57
+; CHECK-RV32-NEXT:    li a4, 56
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_598
+; CHECK-RV32-NEXT:    j .LBB61_61
+; CHECK-RV32-NEXT:  .LBB61_598: # %cond.load225
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 58
+; CHECK-RV32-NEXT:    li a4, 57
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_599
+; CHECK-RV32-NEXT:    j .LBB61_62
+; CHECK-RV32-NEXT:  .LBB61_599: # %cond.load229
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 59
+; CHECK-RV32-NEXT:    li a4, 58
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_600
+; CHECK-RV32-NEXT:    j .LBB61_63
+; CHECK-RV32-NEXT:  .LBB61_600: # %cond.load233
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 60
+; CHECK-RV32-NEXT:    li a4, 59
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_601
+; CHECK-RV32-NEXT:    j .LBB61_64
+; CHECK-RV32-NEXT:  .LBB61_601: # %cond.load237
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 61
+; CHECK-RV32-NEXT:    li a4, 60
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1026
+; CHECK-RV32-NEXT:    j .LBB61_65
+; CHECK-RV32-NEXT:  .LBB61_1026: # %cond.load237
+; CHECK-RV32-NEXT:    j .LBB61_66
+; CHECK-RV32-NEXT:  .LBB61_602: # %cond.load249
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 64
+; CHECK-RV32-NEXT:    li a4, 63
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_603
+; CHECK-RV32-NEXT:    j .LBB61_70
+; CHECK-RV32-NEXT:  .LBB61_603: # %cond.load253
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 65
+; CHECK-RV32-NEXT:    li a4, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_604
+; CHECK-RV32-NEXT:    j .LBB61_71
+; CHECK-RV32-NEXT:  .LBB61_604: # %cond.load257
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 66
+; CHECK-RV32-NEXT:    li a4, 65
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_605
+; CHECK-RV32-NEXT:    j .LBB61_72
+; CHECK-RV32-NEXT:  .LBB61_605: # %cond.load261
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 67
+; CHECK-RV32-NEXT:    li a4, 66
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_606
+; CHECK-RV32-NEXT:    j .LBB61_73
+; CHECK-RV32-NEXT:  .LBB61_606: # %cond.load265
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 68
+; CHECK-RV32-NEXT:    li a4, 67
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_607
+; CHECK-RV32-NEXT:    j .LBB61_74
+; CHECK-RV32-NEXT:  .LBB61_607: # %cond.load269
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 69
+; CHECK-RV32-NEXT:    li a4, 68
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_608
+; CHECK-RV32-NEXT:    j .LBB61_75
+; CHECK-RV32-NEXT:  .LBB61_608: # %cond.load273
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 70
+; CHECK-RV32-NEXT:    li a4, 69
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_609
+; CHECK-RV32-NEXT:    j .LBB61_76
+; CHECK-RV32-NEXT:  .LBB61_609: # %cond.load277
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 71
+; CHECK-RV32-NEXT:    li a4, 70
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_610
+; CHECK-RV32-NEXT:    j .LBB61_77
+; CHECK-RV32-NEXT:  .LBB61_610: # %cond.load281
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 72
+; CHECK-RV32-NEXT:    li a4, 71
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_611
+; CHECK-RV32-NEXT:    j .LBB61_78
+; CHECK-RV32-NEXT:  .LBB61_611: # %cond.load285
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 73
+; CHECK-RV32-NEXT:    li a4, 72
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_612
+; CHECK-RV32-NEXT:    j .LBB61_79
+; CHECK-RV32-NEXT:  .LBB61_612: # %cond.load289
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 74
+; CHECK-RV32-NEXT:    li a4, 73
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_613
+; CHECK-RV32-NEXT:    j .LBB61_80
+; CHECK-RV32-NEXT:  .LBB61_613: # %cond.load293
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 75
+; CHECK-RV32-NEXT:    li a4, 74
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_614
+; CHECK-RV32-NEXT:    j .LBB61_81
+; CHECK-RV32-NEXT:  .LBB61_614: # %cond.load297
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 76
+; CHECK-RV32-NEXT:    li a4, 75
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_615
+; CHECK-RV32-NEXT:    j .LBB61_82
+; CHECK-RV32-NEXT:  .LBB61_615: # %cond.load301
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 77
+; CHECK-RV32-NEXT:    li a4, 76
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_616
+; CHECK-RV32-NEXT:    j .LBB61_83
+; CHECK-RV32-NEXT:  .LBB61_616: # %cond.load305
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 78
+; CHECK-RV32-NEXT:    li a4, 77
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_617
+; CHECK-RV32-NEXT:    j .LBB61_84
+; CHECK-RV32-NEXT:  .LBB61_617: # %cond.load309
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 79
+; CHECK-RV32-NEXT:    li a4, 78
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_618
+; CHECK-RV32-NEXT:    j .LBB61_85
+; CHECK-RV32-NEXT:  .LBB61_618: # %cond.load313
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 80
+; CHECK-RV32-NEXT:    li a4, 79
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_619
+; CHECK-RV32-NEXT:    j .LBB61_86
+; CHECK-RV32-NEXT:  .LBB61_619: # %cond.load317
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 81
+; CHECK-RV32-NEXT:    li a4, 80
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_620
+; CHECK-RV32-NEXT:    j .LBB61_87
+; CHECK-RV32-NEXT:  .LBB61_620: # %cond.load321
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 82
+; CHECK-RV32-NEXT:    li a4, 81
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_621
+; CHECK-RV32-NEXT:    j .LBB61_88
+; CHECK-RV32-NEXT:  .LBB61_621: # %cond.load325
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 83
+; CHECK-RV32-NEXT:    li a4, 82
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_622
+; CHECK-RV32-NEXT:    j .LBB61_89
+; CHECK-RV32-NEXT:  .LBB61_622: # %cond.load329
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 84
+; CHECK-RV32-NEXT:    li a4, 83
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_623
+; CHECK-RV32-NEXT:    j .LBB61_90
+; CHECK-RV32-NEXT:  .LBB61_623: # %cond.load333
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 85
+; CHECK-RV32-NEXT:    li a4, 84
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_624
+; CHECK-RV32-NEXT:    j .LBB61_91
+; CHECK-RV32-NEXT:  .LBB61_624: # %cond.load337
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 86
+; CHECK-RV32-NEXT:    li a4, 85
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_625
+; CHECK-RV32-NEXT:    j .LBB61_92
+; CHECK-RV32-NEXT:  .LBB61_625: # %cond.load341
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 87
+; CHECK-RV32-NEXT:    li a4, 86
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_626
+; CHECK-RV32-NEXT:    j .LBB61_93
+; CHECK-RV32-NEXT:  .LBB61_626: # %cond.load345
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 88
+; CHECK-RV32-NEXT:    li a4, 87
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_627
+; CHECK-RV32-NEXT:    j .LBB61_94
+; CHECK-RV32-NEXT:  .LBB61_627: # %cond.load349
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 89
+; CHECK-RV32-NEXT:    li a4, 88
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_628
+; CHECK-RV32-NEXT:    j .LBB61_95
+; CHECK-RV32-NEXT:  .LBB61_628: # %cond.load353
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 90
+; CHECK-RV32-NEXT:    li a4, 89
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_629
+; CHECK-RV32-NEXT:    j .LBB61_96
+; CHECK-RV32-NEXT:  .LBB61_629: # %cond.load357
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 91
+; CHECK-RV32-NEXT:    li a4, 90
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_630
+; CHECK-RV32-NEXT:    j .LBB61_97
+; CHECK-RV32-NEXT:  .LBB61_630: # %cond.load361
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 92
+; CHECK-RV32-NEXT:    li a4, 91
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_631
+; CHECK-RV32-NEXT:    j .LBB61_98
+; CHECK-RV32-NEXT:  .LBB61_631: # %cond.load365
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 93
+; CHECK-RV32-NEXT:    li a4, 92
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1027
+; CHECK-RV32-NEXT:    j .LBB61_99
+; CHECK-RV32-NEXT:  .LBB61_1027: # %cond.load365
+; CHECK-RV32-NEXT:    j .LBB61_100
+; CHECK-RV32-NEXT:  .LBB61_632: # %cond.load377
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 96
+; CHECK-RV32-NEXT:    li a4, 95
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_633
+; CHECK-RV32-NEXT:    j .LBB61_104
+; CHECK-RV32-NEXT:  .LBB61_633: # %cond.load381
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 97
+; CHECK-RV32-NEXT:    li a4, 96
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_634
+; CHECK-RV32-NEXT:    j .LBB61_105
+; CHECK-RV32-NEXT:  .LBB61_634: # %cond.load385
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 98
+; CHECK-RV32-NEXT:    li a4, 97
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_635
+; CHECK-RV32-NEXT:    j .LBB61_106
+; CHECK-RV32-NEXT:  .LBB61_635: # %cond.load389
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 99
+; CHECK-RV32-NEXT:    li a4, 98
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_636
+; CHECK-RV32-NEXT:    j .LBB61_107
+; CHECK-RV32-NEXT:  .LBB61_636: # %cond.load393
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 100
+; CHECK-RV32-NEXT:    li a4, 99
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_637
+; CHECK-RV32-NEXT:    j .LBB61_108
+; CHECK-RV32-NEXT:  .LBB61_637: # %cond.load397
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 101
+; CHECK-RV32-NEXT:    li a4, 100
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_638
+; CHECK-RV32-NEXT:    j .LBB61_109
+; CHECK-RV32-NEXT:  .LBB61_638: # %cond.load401
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 102
+; CHECK-RV32-NEXT:    li a4, 101
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_639
+; CHECK-RV32-NEXT:    j .LBB61_110
+; CHECK-RV32-NEXT:  .LBB61_639: # %cond.load405
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 103
+; CHECK-RV32-NEXT:    li a4, 102
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_640
+; CHECK-RV32-NEXT:    j .LBB61_111
+; CHECK-RV32-NEXT:  .LBB61_640: # %cond.load409
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 104
+; CHECK-RV32-NEXT:    li a4, 103
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_641
+; CHECK-RV32-NEXT:    j .LBB61_112
+; CHECK-RV32-NEXT:  .LBB61_641: # %cond.load413
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 105
+; CHECK-RV32-NEXT:    li a4, 104
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_642
+; CHECK-RV32-NEXT:    j .LBB61_113
+; CHECK-RV32-NEXT:  .LBB61_642: # %cond.load417
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 106
+; CHECK-RV32-NEXT:    li a4, 105
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_643
+; CHECK-RV32-NEXT:    j .LBB61_114
+; CHECK-RV32-NEXT:  .LBB61_643: # %cond.load421
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 107
+; CHECK-RV32-NEXT:    li a4, 106
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_644
+; CHECK-RV32-NEXT:    j .LBB61_115
+; CHECK-RV32-NEXT:  .LBB61_644: # %cond.load425
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 108
+; CHECK-RV32-NEXT:    li a4, 107
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_645
+; CHECK-RV32-NEXT:    j .LBB61_116
+; CHECK-RV32-NEXT:  .LBB61_645: # %cond.load429
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 109
+; CHECK-RV32-NEXT:    li a4, 108
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_646
+; CHECK-RV32-NEXT:    j .LBB61_117
+; CHECK-RV32-NEXT:  .LBB61_646: # %cond.load433
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 110
+; CHECK-RV32-NEXT:    li a4, 109
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_647
+; CHECK-RV32-NEXT:    j .LBB61_118
+; CHECK-RV32-NEXT:  .LBB61_647: # %cond.load437
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 111
+; CHECK-RV32-NEXT:    li a4, 110
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_648
+; CHECK-RV32-NEXT:    j .LBB61_119
+; CHECK-RV32-NEXT:  .LBB61_648: # %cond.load441
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 112
+; CHECK-RV32-NEXT:    li a4, 111
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_649
+; CHECK-RV32-NEXT:    j .LBB61_120
+; CHECK-RV32-NEXT:  .LBB61_649: # %cond.load445
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 113
+; CHECK-RV32-NEXT:    li a4, 112
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_650
+; CHECK-RV32-NEXT:    j .LBB61_121
+; CHECK-RV32-NEXT:  .LBB61_650: # %cond.load449
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 114
+; CHECK-RV32-NEXT:    li a4, 113
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_651
+; CHECK-RV32-NEXT:    j .LBB61_122
+; CHECK-RV32-NEXT:  .LBB61_651: # %cond.load453
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 115
+; CHECK-RV32-NEXT:    li a4, 114
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_652
+; CHECK-RV32-NEXT:    j .LBB61_123
+; CHECK-RV32-NEXT:  .LBB61_652: # %cond.load457
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 116
+; CHECK-RV32-NEXT:    li a4, 115
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_653
+; CHECK-RV32-NEXT:    j .LBB61_124
+; CHECK-RV32-NEXT:  .LBB61_653: # %cond.load461
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 117
+; CHECK-RV32-NEXT:    li a4, 116
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_654
+; CHECK-RV32-NEXT:    j .LBB61_125
+; CHECK-RV32-NEXT:  .LBB61_654: # %cond.load465
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 118
+; CHECK-RV32-NEXT:    li a4, 117
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_655
+; CHECK-RV32-NEXT:    j .LBB61_126
+; CHECK-RV32-NEXT:  .LBB61_655: # %cond.load469
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 119
+; CHECK-RV32-NEXT:    li a4, 118
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_656
+; CHECK-RV32-NEXT:    j .LBB61_127
+; CHECK-RV32-NEXT:  .LBB61_656: # %cond.load473
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 120
+; CHECK-RV32-NEXT:    li a4, 119
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_657
+; CHECK-RV32-NEXT:    j .LBB61_128
+; CHECK-RV32-NEXT:  .LBB61_657: # %cond.load477
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 121
+; CHECK-RV32-NEXT:    li a4, 120
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_658
+; CHECK-RV32-NEXT:    j .LBB61_129
+; CHECK-RV32-NEXT:  .LBB61_658: # %cond.load481
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 122
+; CHECK-RV32-NEXT:    li a4, 121
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_659
+; CHECK-RV32-NEXT:    j .LBB61_130
+; CHECK-RV32-NEXT:  .LBB61_659: # %cond.load485
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 123
+; CHECK-RV32-NEXT:    li a4, 122
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_660
+; CHECK-RV32-NEXT:    j .LBB61_131
+; CHECK-RV32-NEXT:  .LBB61_660: # %cond.load489
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 124
+; CHECK-RV32-NEXT:    li a4, 123
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_661
+; CHECK-RV32-NEXT:    j .LBB61_132
+; CHECK-RV32-NEXT:  .LBB61_661: # %cond.load493
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 125
+; CHECK-RV32-NEXT:    li a4, 124
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1028
+; CHECK-RV32-NEXT:    j .LBB61_133
+; CHECK-RV32-NEXT:  .LBB61_1028: # %cond.load493
+; CHECK-RV32-NEXT:    j .LBB61_134
+; CHECK-RV32-NEXT:  .LBB61_662: # %cond.load505
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 128
+; CHECK-RV32-NEXT:    li a4, 127
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_663
+; CHECK-RV32-NEXT:    j .LBB61_138
+; CHECK-RV32-NEXT:  .LBB61_663: # %cond.load509
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 129
+; CHECK-RV32-NEXT:    li a4, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_664
+; CHECK-RV32-NEXT:    j .LBB61_139
+; CHECK-RV32-NEXT:  .LBB61_664: # %cond.load513
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 130
+; CHECK-RV32-NEXT:    li a4, 129
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_665
+; CHECK-RV32-NEXT:    j .LBB61_140
+; CHECK-RV32-NEXT:  .LBB61_665: # %cond.load517
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 131
+; CHECK-RV32-NEXT:    li a4, 130
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_666
+; CHECK-RV32-NEXT:    j .LBB61_141
+; CHECK-RV32-NEXT:  .LBB61_666: # %cond.load521
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 132
+; CHECK-RV32-NEXT:    li a4, 131
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_667
+; CHECK-RV32-NEXT:    j .LBB61_142
+; CHECK-RV32-NEXT:  .LBB61_667: # %cond.load525
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 133
+; CHECK-RV32-NEXT:    li a4, 132
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_668
+; CHECK-RV32-NEXT:    j .LBB61_143
+; CHECK-RV32-NEXT:  .LBB61_668: # %cond.load529
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 134
+; CHECK-RV32-NEXT:    li a4, 133
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_669
+; CHECK-RV32-NEXT:    j .LBB61_144
+; CHECK-RV32-NEXT:  .LBB61_669: # %cond.load533
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 135
+; CHECK-RV32-NEXT:    li a4, 134
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_670
+; CHECK-RV32-NEXT:    j .LBB61_145
+; CHECK-RV32-NEXT:  .LBB61_670: # %cond.load537
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 136
+; CHECK-RV32-NEXT:    li a4, 135
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_671
+; CHECK-RV32-NEXT:    j .LBB61_146
+; CHECK-RV32-NEXT:  .LBB61_671: # %cond.load541
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 137
+; CHECK-RV32-NEXT:    li a4, 136
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_672
+; CHECK-RV32-NEXT:    j .LBB61_147
+; CHECK-RV32-NEXT:  .LBB61_672: # %cond.load545
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 138
+; CHECK-RV32-NEXT:    li a4, 137
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_673
+; CHECK-RV32-NEXT:    j .LBB61_148
+; CHECK-RV32-NEXT:  .LBB61_673: # %cond.load549
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 139
+; CHECK-RV32-NEXT:    li a4, 138
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_674
+; CHECK-RV32-NEXT:    j .LBB61_149
+; CHECK-RV32-NEXT:  .LBB61_674: # %cond.load553
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 140
+; CHECK-RV32-NEXT:    li a4, 139
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_675
+; CHECK-RV32-NEXT:    j .LBB61_150
+; CHECK-RV32-NEXT:  .LBB61_675: # %cond.load557
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 141
+; CHECK-RV32-NEXT:    li a4, 140
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_676
+; CHECK-RV32-NEXT:    j .LBB61_151
+; CHECK-RV32-NEXT:  .LBB61_676: # %cond.load561
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 142
+; CHECK-RV32-NEXT:    li a4, 141
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_677
+; CHECK-RV32-NEXT:    j .LBB61_152
+; CHECK-RV32-NEXT:  .LBB61_677: # %cond.load565
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 143
+; CHECK-RV32-NEXT:    li a4, 142
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_678
+; CHECK-RV32-NEXT:    j .LBB61_153
+; CHECK-RV32-NEXT:  .LBB61_678: # %cond.load569
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 144
+; CHECK-RV32-NEXT:    li a4, 143
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_679
+; CHECK-RV32-NEXT:    j .LBB61_154
+; CHECK-RV32-NEXT:  .LBB61_679: # %cond.load573
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 145
+; CHECK-RV32-NEXT:    li a4, 144
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_680
+; CHECK-RV32-NEXT:    j .LBB61_155
+; CHECK-RV32-NEXT:  .LBB61_680: # %cond.load577
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 146
+; CHECK-RV32-NEXT:    li a4, 145
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_681
+; CHECK-RV32-NEXT:    j .LBB61_156
+; CHECK-RV32-NEXT:  .LBB61_681: # %cond.load581
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 147
+; CHECK-RV32-NEXT:    li a4, 146
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_682
+; CHECK-RV32-NEXT:    j .LBB61_157
+; CHECK-RV32-NEXT:  .LBB61_682: # %cond.load585
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 148
+; CHECK-RV32-NEXT:    li a4, 147
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_683
+; CHECK-RV32-NEXT:    j .LBB61_158
+; CHECK-RV32-NEXT:  .LBB61_683: # %cond.load589
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 149
+; CHECK-RV32-NEXT:    li a4, 148
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_684
+; CHECK-RV32-NEXT:    j .LBB61_159
+; CHECK-RV32-NEXT:  .LBB61_684: # %cond.load593
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 150
+; CHECK-RV32-NEXT:    li a4, 149
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_685
+; CHECK-RV32-NEXT:    j .LBB61_160
+; CHECK-RV32-NEXT:  .LBB61_685: # %cond.load597
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 151
+; CHECK-RV32-NEXT:    li a4, 150
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_686
+; CHECK-RV32-NEXT:    j .LBB61_161
+; CHECK-RV32-NEXT:  .LBB61_686: # %cond.load601
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 152
+; CHECK-RV32-NEXT:    li a4, 151
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_687
+; CHECK-RV32-NEXT:    j .LBB61_162
+; CHECK-RV32-NEXT:  .LBB61_687: # %cond.load605
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 153
+; CHECK-RV32-NEXT:    li a4, 152
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_688
+; CHECK-RV32-NEXT:    j .LBB61_163
+; CHECK-RV32-NEXT:  .LBB61_688: # %cond.load609
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 154
+; CHECK-RV32-NEXT:    li a4, 153
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_689
+; CHECK-RV32-NEXT:    j .LBB61_164
+; CHECK-RV32-NEXT:  .LBB61_689: # %cond.load613
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 155
+; CHECK-RV32-NEXT:    li a4, 154
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_690
+; CHECK-RV32-NEXT:    j .LBB61_165
+; CHECK-RV32-NEXT:  .LBB61_690: # %cond.load617
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 156
+; CHECK-RV32-NEXT:    li a4, 155
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_691
+; CHECK-RV32-NEXT:    j .LBB61_166
+; CHECK-RV32-NEXT:  .LBB61_691: # %cond.load621
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 157
+; CHECK-RV32-NEXT:    li a4, 156
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1029
+; CHECK-RV32-NEXT:    j .LBB61_167
+; CHECK-RV32-NEXT:  .LBB61_1029: # %cond.load621
+; CHECK-RV32-NEXT:    j .LBB61_168
+; CHECK-RV32-NEXT:  .LBB61_692: # %cond.load633
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 160
+; CHECK-RV32-NEXT:    li a4, 159
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_693
+; CHECK-RV32-NEXT:    j .LBB61_172
+; CHECK-RV32-NEXT:  .LBB61_693: # %cond.load637
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 161
+; CHECK-RV32-NEXT:    li a4, 160
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_694
+; CHECK-RV32-NEXT:    j .LBB61_173
+; CHECK-RV32-NEXT:  .LBB61_694: # %cond.load641
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 162
+; CHECK-RV32-NEXT:    li a4, 161
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_695
+; CHECK-RV32-NEXT:    j .LBB61_174
+; CHECK-RV32-NEXT:  .LBB61_695: # %cond.load645
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 163
+; CHECK-RV32-NEXT:    li a4, 162
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_696
+; CHECK-RV32-NEXT:    j .LBB61_175
+; CHECK-RV32-NEXT:  .LBB61_696: # %cond.load649
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 164
+; CHECK-RV32-NEXT:    li a4, 163
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_697
+; CHECK-RV32-NEXT:    j .LBB61_176
+; CHECK-RV32-NEXT:  .LBB61_697: # %cond.load653
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 165
+; CHECK-RV32-NEXT:    li a4, 164
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_698
+; CHECK-RV32-NEXT:    j .LBB61_177
+; CHECK-RV32-NEXT:  .LBB61_698: # %cond.load657
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 166
+; CHECK-RV32-NEXT:    li a4, 165
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_699
+; CHECK-RV32-NEXT:    j .LBB61_178
+; CHECK-RV32-NEXT:  .LBB61_699: # %cond.load661
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 167
+; CHECK-RV32-NEXT:    li a4, 166
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_700
+; CHECK-RV32-NEXT:    j .LBB61_179
+; CHECK-RV32-NEXT:  .LBB61_700: # %cond.load665
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 168
+; CHECK-RV32-NEXT:    li a4, 167
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_701
+; CHECK-RV32-NEXT:    j .LBB61_180
+; CHECK-RV32-NEXT:  .LBB61_701: # %cond.load669
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 169
+; CHECK-RV32-NEXT:    li a4, 168
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_702
+; CHECK-RV32-NEXT:    j .LBB61_181
+; CHECK-RV32-NEXT:  .LBB61_702: # %cond.load673
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 170
+; CHECK-RV32-NEXT:    li a4, 169
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_703
+; CHECK-RV32-NEXT:    j .LBB61_182
+; CHECK-RV32-NEXT:  .LBB61_703: # %cond.load677
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 171
+; CHECK-RV32-NEXT:    li a4, 170
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_704
+; CHECK-RV32-NEXT:    j .LBB61_183
+; CHECK-RV32-NEXT:  .LBB61_704: # %cond.load681
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 172
+; CHECK-RV32-NEXT:    li a4, 171
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_705
+; CHECK-RV32-NEXT:    j .LBB61_184
+; CHECK-RV32-NEXT:  .LBB61_705: # %cond.load685
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 173
+; CHECK-RV32-NEXT:    li a4, 172
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_706
+; CHECK-RV32-NEXT:    j .LBB61_185
+; CHECK-RV32-NEXT:  .LBB61_706: # %cond.load689
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 174
+; CHECK-RV32-NEXT:    li a4, 173
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_707
+; CHECK-RV32-NEXT:    j .LBB61_186
+; CHECK-RV32-NEXT:  .LBB61_707: # %cond.load693
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 175
+; CHECK-RV32-NEXT:    li a4, 174
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_708
+; CHECK-RV32-NEXT:    j .LBB61_187
+; CHECK-RV32-NEXT:  .LBB61_708: # %cond.load697
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 176
+; CHECK-RV32-NEXT:    li a4, 175
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_709
+; CHECK-RV32-NEXT:    j .LBB61_188
+; CHECK-RV32-NEXT:  .LBB61_709: # %cond.load701
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 177
+; CHECK-RV32-NEXT:    li a4, 176
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_710
+; CHECK-RV32-NEXT:    j .LBB61_189
+; CHECK-RV32-NEXT:  .LBB61_710: # %cond.load705
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 178
+; CHECK-RV32-NEXT:    li a4, 177
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_711
+; CHECK-RV32-NEXT:    j .LBB61_190
+; CHECK-RV32-NEXT:  .LBB61_711: # %cond.load709
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 179
+; CHECK-RV32-NEXT:    li a4, 178
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_712
+; CHECK-RV32-NEXT:    j .LBB61_191
+; CHECK-RV32-NEXT:  .LBB61_712: # %cond.load713
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 180
+; CHECK-RV32-NEXT:    li a4, 179
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_713
+; CHECK-RV32-NEXT:    j .LBB61_192
+; CHECK-RV32-NEXT:  .LBB61_713: # %cond.load717
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 181
+; CHECK-RV32-NEXT:    li a4, 180
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_714
+; CHECK-RV32-NEXT:    j .LBB61_193
+; CHECK-RV32-NEXT:  .LBB61_714: # %cond.load721
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 182
+; CHECK-RV32-NEXT:    li a4, 181
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_715
+; CHECK-RV32-NEXT:    j .LBB61_194
+; CHECK-RV32-NEXT:  .LBB61_715: # %cond.load725
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 183
+; CHECK-RV32-NEXT:    li a4, 182
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_716
+; CHECK-RV32-NEXT:    j .LBB61_195
+; CHECK-RV32-NEXT:  .LBB61_716: # %cond.load729
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 184
+; CHECK-RV32-NEXT:    li a4, 183
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_717
+; CHECK-RV32-NEXT:    j .LBB61_196
+; CHECK-RV32-NEXT:  .LBB61_717: # %cond.load733
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 185
+; CHECK-RV32-NEXT:    li a4, 184
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_718
+; CHECK-RV32-NEXT:    j .LBB61_197
+; CHECK-RV32-NEXT:  .LBB61_718: # %cond.load737
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 186
+; CHECK-RV32-NEXT:    li a4, 185
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_719
+; CHECK-RV32-NEXT:    j .LBB61_198
+; CHECK-RV32-NEXT:  .LBB61_719: # %cond.load741
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 187
+; CHECK-RV32-NEXT:    li a4, 186
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_720
+; CHECK-RV32-NEXT:    j .LBB61_199
+; CHECK-RV32-NEXT:  .LBB61_720: # %cond.load745
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 188
+; CHECK-RV32-NEXT:    li a4, 187
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_721
+; CHECK-RV32-NEXT:    j .LBB61_200
+; CHECK-RV32-NEXT:  .LBB61_721: # %cond.load749
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 189
+; CHECK-RV32-NEXT:    li a4, 188
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1030
+; CHECK-RV32-NEXT:    j .LBB61_201
+; CHECK-RV32-NEXT:  .LBB61_1030: # %cond.load749
+; CHECK-RV32-NEXT:    j .LBB61_202
+; CHECK-RV32-NEXT:  .LBB61_722: # %cond.load761
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 192
+; CHECK-RV32-NEXT:    li a4, 191
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_723
+; CHECK-RV32-NEXT:    j .LBB61_206
+; CHECK-RV32-NEXT:  .LBB61_723: # %cond.load765
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 193
+; CHECK-RV32-NEXT:    li a4, 192
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_724
+; CHECK-RV32-NEXT:    j .LBB61_207
+; CHECK-RV32-NEXT:  .LBB61_724: # %cond.load769
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 194
+; CHECK-RV32-NEXT:    li a4, 193
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_725
+; CHECK-RV32-NEXT:    j .LBB61_208
+; CHECK-RV32-NEXT:  .LBB61_725: # %cond.load773
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 195
+; CHECK-RV32-NEXT:    li a4, 194
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_726
+; CHECK-RV32-NEXT:    j .LBB61_209
+; CHECK-RV32-NEXT:  .LBB61_726: # %cond.load777
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 196
+; CHECK-RV32-NEXT:    li a4, 195
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_727
+; CHECK-RV32-NEXT:    j .LBB61_210
+; CHECK-RV32-NEXT:  .LBB61_727: # %cond.load781
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 197
+; CHECK-RV32-NEXT:    li a4, 196
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_728
+; CHECK-RV32-NEXT:    j .LBB61_211
+; CHECK-RV32-NEXT:  .LBB61_728: # %cond.load785
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 198
+; CHECK-RV32-NEXT:    li a4, 197
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_729
+; CHECK-RV32-NEXT:    j .LBB61_212
+; CHECK-RV32-NEXT:  .LBB61_729: # %cond.load789
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 199
+; CHECK-RV32-NEXT:    li a4, 198
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_730
+; CHECK-RV32-NEXT:    j .LBB61_213
+; CHECK-RV32-NEXT:  .LBB61_730: # %cond.load793
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 200
+; CHECK-RV32-NEXT:    li a4, 199
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_731
+; CHECK-RV32-NEXT:    j .LBB61_214
+; CHECK-RV32-NEXT:  .LBB61_731: # %cond.load797
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 201
+; CHECK-RV32-NEXT:    li a4, 200
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_732
+; CHECK-RV32-NEXT:    j .LBB61_215
+; CHECK-RV32-NEXT:  .LBB61_732: # %cond.load801
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 202
+; CHECK-RV32-NEXT:    li a4, 201
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_733
+; CHECK-RV32-NEXT:    j .LBB61_216
+; CHECK-RV32-NEXT:  .LBB61_733: # %cond.load805
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 203
+; CHECK-RV32-NEXT:    li a4, 202
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_734
+; CHECK-RV32-NEXT:    j .LBB61_217
+; CHECK-RV32-NEXT:  .LBB61_734: # %cond.load809
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 204
+; CHECK-RV32-NEXT:    li a4, 203
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_735
+; CHECK-RV32-NEXT:    j .LBB61_218
+; CHECK-RV32-NEXT:  .LBB61_735: # %cond.load813
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 205
+; CHECK-RV32-NEXT:    li a4, 204
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_736
+; CHECK-RV32-NEXT:    j .LBB61_219
+; CHECK-RV32-NEXT:  .LBB61_736: # %cond.load817
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 206
+; CHECK-RV32-NEXT:    li a4, 205
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_737
+; CHECK-RV32-NEXT:    j .LBB61_220
+; CHECK-RV32-NEXT:  .LBB61_737: # %cond.load821
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 207
+; CHECK-RV32-NEXT:    li a4, 206
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_738
+; CHECK-RV32-NEXT:    j .LBB61_221
+; CHECK-RV32-NEXT:  .LBB61_738: # %cond.load825
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 208
+; CHECK-RV32-NEXT:    li a4, 207
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_739
+; CHECK-RV32-NEXT:    j .LBB61_222
+; CHECK-RV32-NEXT:  .LBB61_739: # %cond.load829
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 209
+; CHECK-RV32-NEXT:    li a4, 208
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_740
+; CHECK-RV32-NEXT:    j .LBB61_223
+; CHECK-RV32-NEXT:  .LBB61_740: # %cond.load833
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 210
+; CHECK-RV32-NEXT:    li a4, 209
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_741
+; CHECK-RV32-NEXT:    j .LBB61_224
+; CHECK-RV32-NEXT:  .LBB61_741: # %cond.load837
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 211
+; CHECK-RV32-NEXT:    li a4, 210
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_742
+; CHECK-RV32-NEXT:    j .LBB61_225
+; CHECK-RV32-NEXT:  .LBB61_742: # %cond.load841
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 212
+; CHECK-RV32-NEXT:    li a4, 211
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_743
+; CHECK-RV32-NEXT:    j .LBB61_226
+; CHECK-RV32-NEXT:  .LBB61_743: # %cond.load845
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 213
+; CHECK-RV32-NEXT:    li a4, 212
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_744
+; CHECK-RV32-NEXT:    j .LBB61_227
+; CHECK-RV32-NEXT:  .LBB61_744: # %cond.load849
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 214
+; CHECK-RV32-NEXT:    li a4, 213
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_745
+; CHECK-RV32-NEXT:    j .LBB61_228
+; CHECK-RV32-NEXT:  .LBB61_745: # %cond.load853
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 215
+; CHECK-RV32-NEXT:    li a4, 214
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_746
+; CHECK-RV32-NEXT:    j .LBB61_229
+; CHECK-RV32-NEXT:  .LBB61_746: # %cond.load857
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 216
+; CHECK-RV32-NEXT:    li a4, 215
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_747
+; CHECK-RV32-NEXT:    j .LBB61_230
+; CHECK-RV32-NEXT:  .LBB61_747: # %cond.load861
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 217
+; CHECK-RV32-NEXT:    li a4, 216
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_748
+; CHECK-RV32-NEXT:    j .LBB61_231
+; CHECK-RV32-NEXT:  .LBB61_748: # %cond.load865
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 218
+; CHECK-RV32-NEXT:    li a4, 217
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_749
+; CHECK-RV32-NEXT:    j .LBB61_232
+; CHECK-RV32-NEXT:  .LBB61_749: # %cond.load869
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 219
+; CHECK-RV32-NEXT:    li a4, 218
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_750
+; CHECK-RV32-NEXT:    j .LBB61_233
+; CHECK-RV32-NEXT:  .LBB61_750: # %cond.load873
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 220
+; CHECK-RV32-NEXT:    li a4, 219
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_751
+; CHECK-RV32-NEXT:    j .LBB61_234
+; CHECK-RV32-NEXT:  .LBB61_751: # %cond.load877
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 221
+; CHECK-RV32-NEXT:    li a4, 220
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1031
+; CHECK-RV32-NEXT:    j .LBB61_235
+; CHECK-RV32-NEXT:  .LBB61_1031: # %cond.load877
+; CHECK-RV32-NEXT:    j .LBB61_236
+; CHECK-RV32-NEXT:  .LBB61_752: # %cond.load889
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 224
+; CHECK-RV32-NEXT:    li a4, 223
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_753
+; CHECK-RV32-NEXT:    j .LBB61_240
+; CHECK-RV32-NEXT:  .LBB61_753: # %cond.load893
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 225
+; CHECK-RV32-NEXT:    li a4, 224
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_754
+; CHECK-RV32-NEXT:    j .LBB61_241
+; CHECK-RV32-NEXT:  .LBB61_754: # %cond.load897
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 226
+; CHECK-RV32-NEXT:    li a4, 225
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_755
+; CHECK-RV32-NEXT:    j .LBB61_242
+; CHECK-RV32-NEXT:  .LBB61_755: # %cond.load901
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 227
+; CHECK-RV32-NEXT:    li a4, 226
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_756
+; CHECK-RV32-NEXT:    j .LBB61_243
+; CHECK-RV32-NEXT:  .LBB61_756: # %cond.load905
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 228
+; CHECK-RV32-NEXT:    li a4, 227
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_757
+; CHECK-RV32-NEXT:    j .LBB61_244
+; CHECK-RV32-NEXT:  .LBB61_757: # %cond.load909
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 229
+; CHECK-RV32-NEXT:    li a4, 228
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_758
+; CHECK-RV32-NEXT:    j .LBB61_245
+; CHECK-RV32-NEXT:  .LBB61_758: # %cond.load913
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 230
+; CHECK-RV32-NEXT:    li a4, 229
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_759
+; CHECK-RV32-NEXT:    j .LBB61_246
+; CHECK-RV32-NEXT:  .LBB61_759: # %cond.load917
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 231
+; CHECK-RV32-NEXT:    li a4, 230
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_760
+; CHECK-RV32-NEXT:    j .LBB61_247
+; CHECK-RV32-NEXT:  .LBB61_760: # %cond.load921
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 232
+; CHECK-RV32-NEXT:    li a4, 231
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_761
+; CHECK-RV32-NEXT:    j .LBB61_248
+; CHECK-RV32-NEXT:  .LBB61_761: # %cond.load925
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 233
+; CHECK-RV32-NEXT:    li a4, 232
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_762
+; CHECK-RV32-NEXT:    j .LBB61_249
+; CHECK-RV32-NEXT:  .LBB61_762: # %cond.load929
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 234
+; CHECK-RV32-NEXT:    li a4, 233
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_763
+; CHECK-RV32-NEXT:    j .LBB61_250
+; CHECK-RV32-NEXT:  .LBB61_763: # %cond.load933
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 235
+; CHECK-RV32-NEXT:    li a4, 234
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_764
+; CHECK-RV32-NEXT:    j .LBB61_251
+; CHECK-RV32-NEXT:  .LBB61_764: # %cond.load937
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 236
+; CHECK-RV32-NEXT:    li a4, 235
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_765
+; CHECK-RV32-NEXT:    j .LBB61_252
+; CHECK-RV32-NEXT:  .LBB61_765: # %cond.load941
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 237
+; CHECK-RV32-NEXT:    li a4, 236
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_766
+; CHECK-RV32-NEXT:    j .LBB61_253
+; CHECK-RV32-NEXT:  .LBB61_766: # %cond.load945
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 238
+; CHECK-RV32-NEXT:    li a4, 237
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_767
+; CHECK-RV32-NEXT:    j .LBB61_254
+; CHECK-RV32-NEXT:  .LBB61_767: # %cond.load949
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 239
+; CHECK-RV32-NEXT:    li a4, 238
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_768
+; CHECK-RV32-NEXT:    j .LBB61_255
+; CHECK-RV32-NEXT:  .LBB61_768: # %cond.load953
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 240
+; CHECK-RV32-NEXT:    li a4, 239
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_769
+; CHECK-RV32-NEXT:    j .LBB61_256
+; CHECK-RV32-NEXT:  .LBB61_769: # %cond.load957
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 241
+; CHECK-RV32-NEXT:    li a4, 240
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_770
+; CHECK-RV32-NEXT:    j .LBB61_257
+; CHECK-RV32-NEXT:  .LBB61_770: # %cond.load961
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 242
+; CHECK-RV32-NEXT:    li a4, 241
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_771
+; CHECK-RV32-NEXT:    j .LBB61_258
+; CHECK-RV32-NEXT:  .LBB61_771: # %cond.load965
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 243
+; CHECK-RV32-NEXT:    li a4, 242
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_772
+; CHECK-RV32-NEXT:    j .LBB61_259
+; CHECK-RV32-NEXT:  .LBB61_772: # %cond.load969
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 244
+; CHECK-RV32-NEXT:    li a4, 243
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_773
+; CHECK-RV32-NEXT:    j .LBB61_260
+; CHECK-RV32-NEXT:  .LBB61_773: # %cond.load973
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 245
+; CHECK-RV32-NEXT:    li a4, 244
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_774
+; CHECK-RV32-NEXT:    j .LBB61_261
+; CHECK-RV32-NEXT:  .LBB61_774: # %cond.load977
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 246
+; CHECK-RV32-NEXT:    li a4, 245
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_775
+; CHECK-RV32-NEXT:    j .LBB61_262
+; CHECK-RV32-NEXT:  .LBB61_775: # %cond.load981
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 247
+; CHECK-RV32-NEXT:    li a4, 246
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_776
+; CHECK-RV32-NEXT:    j .LBB61_263
+; CHECK-RV32-NEXT:  .LBB61_776: # %cond.load985
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 248
+; CHECK-RV32-NEXT:    li a4, 247
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_777
+; CHECK-RV32-NEXT:    j .LBB61_264
+; CHECK-RV32-NEXT:  .LBB61_777: # %cond.load989
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 249
+; CHECK-RV32-NEXT:    li a4, 248
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_778
+; CHECK-RV32-NEXT:    j .LBB61_265
+; CHECK-RV32-NEXT:  .LBB61_778: # %cond.load993
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 250
+; CHECK-RV32-NEXT:    li a4, 249
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_779
+; CHECK-RV32-NEXT:    j .LBB61_266
+; CHECK-RV32-NEXT:  .LBB61_779: # %cond.load997
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 251
+; CHECK-RV32-NEXT:    li a4, 250
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_780
+; CHECK-RV32-NEXT:    j .LBB61_267
+; CHECK-RV32-NEXT:  .LBB61_780: # %cond.load1001
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 252
+; CHECK-RV32-NEXT:    li a4, 251
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_781
+; CHECK-RV32-NEXT:    j .LBB61_268
+; CHECK-RV32-NEXT:  .LBB61_781: # %cond.load1005
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 253
+; CHECK-RV32-NEXT:    li a4, 252
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1032
+; CHECK-RV32-NEXT:    j .LBB61_269
+; CHECK-RV32-NEXT:  .LBB61_1032: # %cond.load1005
+; CHECK-RV32-NEXT:    j .LBB61_270
+; CHECK-RV32-NEXT:  .LBB61_782: # %cond.load1017
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 256
+; CHECK-RV32-NEXT:    li a4, 255
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_783
+; CHECK-RV32-NEXT:    j .LBB61_274
+; CHECK-RV32-NEXT:  .LBB61_783: # %cond.load1021
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 257
+; CHECK-RV32-NEXT:    li a4, 256
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_784
+; CHECK-RV32-NEXT:    j .LBB61_275
+; CHECK-RV32-NEXT:  .LBB61_784: # %cond.load1025
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 258
+; CHECK-RV32-NEXT:    li a4, 257
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_785
+; CHECK-RV32-NEXT:    j .LBB61_276
+; CHECK-RV32-NEXT:  .LBB61_785: # %cond.load1029
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 259
+; CHECK-RV32-NEXT:    li a4, 258
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_786
+; CHECK-RV32-NEXT:    j .LBB61_277
+; CHECK-RV32-NEXT:  .LBB61_786: # %cond.load1033
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 260
+; CHECK-RV32-NEXT:    li a4, 259
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_787
+; CHECK-RV32-NEXT:    j .LBB61_278
+; CHECK-RV32-NEXT:  .LBB61_787: # %cond.load1037
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 261
+; CHECK-RV32-NEXT:    li a4, 260
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_788
+; CHECK-RV32-NEXT:    j .LBB61_279
+; CHECK-RV32-NEXT:  .LBB61_788: # %cond.load1041
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 262
+; CHECK-RV32-NEXT:    li a4, 261
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_789
+; CHECK-RV32-NEXT:    j .LBB61_280
+; CHECK-RV32-NEXT:  .LBB61_789: # %cond.load1045
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 263
+; CHECK-RV32-NEXT:    li a4, 262
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_790
+; CHECK-RV32-NEXT:    j .LBB61_281
+; CHECK-RV32-NEXT:  .LBB61_790: # %cond.load1049
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 264
+; CHECK-RV32-NEXT:    li a4, 263
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_791
+; CHECK-RV32-NEXT:    j .LBB61_282
+; CHECK-RV32-NEXT:  .LBB61_791: # %cond.load1053
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 265
+; CHECK-RV32-NEXT:    li a4, 264
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_792
+; CHECK-RV32-NEXT:    j .LBB61_283
+; CHECK-RV32-NEXT:  .LBB61_792: # %cond.load1057
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 266
+; CHECK-RV32-NEXT:    li a4, 265
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_793
+; CHECK-RV32-NEXT:    j .LBB61_284
+; CHECK-RV32-NEXT:  .LBB61_793: # %cond.load1061
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 267
+; CHECK-RV32-NEXT:    li a4, 266
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_794
+; CHECK-RV32-NEXT:    j .LBB61_285
+; CHECK-RV32-NEXT:  .LBB61_794: # %cond.load1065
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 268
+; CHECK-RV32-NEXT:    li a4, 267
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_795
+; CHECK-RV32-NEXT:    j .LBB61_286
+; CHECK-RV32-NEXT:  .LBB61_795: # %cond.load1069
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 269
+; CHECK-RV32-NEXT:    li a4, 268
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_796
+; CHECK-RV32-NEXT:    j .LBB61_287
+; CHECK-RV32-NEXT:  .LBB61_796: # %cond.load1073
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 270
+; CHECK-RV32-NEXT:    li a4, 269
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_797
+; CHECK-RV32-NEXT:    j .LBB61_288
+; CHECK-RV32-NEXT:  .LBB61_797: # %cond.load1077
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 271
+; CHECK-RV32-NEXT:    li a4, 270
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_798
+; CHECK-RV32-NEXT:    j .LBB61_289
+; CHECK-RV32-NEXT:  .LBB61_798: # %cond.load1081
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 272
+; CHECK-RV32-NEXT:    li a4, 271
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_799
+; CHECK-RV32-NEXT:    j .LBB61_290
+; CHECK-RV32-NEXT:  .LBB61_799: # %cond.load1085
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 273
+; CHECK-RV32-NEXT:    li a4, 272
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_800
+; CHECK-RV32-NEXT:    j .LBB61_291
+; CHECK-RV32-NEXT:  .LBB61_800: # %cond.load1089
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 274
+; CHECK-RV32-NEXT:    li a4, 273
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_801
+; CHECK-RV32-NEXT:    j .LBB61_292
+; CHECK-RV32-NEXT:  .LBB61_801: # %cond.load1093
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 275
+; CHECK-RV32-NEXT:    li a4, 274
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_802
+; CHECK-RV32-NEXT:    j .LBB61_293
+; CHECK-RV32-NEXT:  .LBB61_802: # %cond.load1097
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 276
+; CHECK-RV32-NEXT:    li a4, 275
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_803
+; CHECK-RV32-NEXT:    j .LBB61_294
+; CHECK-RV32-NEXT:  .LBB61_803: # %cond.load1101
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 277
+; CHECK-RV32-NEXT:    li a4, 276
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_804
+; CHECK-RV32-NEXT:    j .LBB61_295
+; CHECK-RV32-NEXT:  .LBB61_804: # %cond.load1105
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 278
+; CHECK-RV32-NEXT:    li a4, 277
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_805
+; CHECK-RV32-NEXT:    j .LBB61_296
+; CHECK-RV32-NEXT:  .LBB61_805: # %cond.load1109
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 279
+; CHECK-RV32-NEXT:    li a4, 278
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_806
+; CHECK-RV32-NEXT:    j .LBB61_297
+; CHECK-RV32-NEXT:  .LBB61_806: # %cond.load1113
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 280
+; CHECK-RV32-NEXT:    li a4, 279
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_807
+; CHECK-RV32-NEXT:    j .LBB61_298
+; CHECK-RV32-NEXT:  .LBB61_807: # %cond.load1117
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 281
+; CHECK-RV32-NEXT:    li a4, 280
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_808
+; CHECK-RV32-NEXT:    j .LBB61_299
+; CHECK-RV32-NEXT:  .LBB61_808: # %cond.load1121
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 282
+; CHECK-RV32-NEXT:    li a4, 281
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_809
+; CHECK-RV32-NEXT:    j .LBB61_300
+; CHECK-RV32-NEXT:  .LBB61_809: # %cond.load1125
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 283
+; CHECK-RV32-NEXT:    li a4, 282
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_810
+; CHECK-RV32-NEXT:    j .LBB61_301
+; CHECK-RV32-NEXT:  .LBB61_810: # %cond.load1129
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 284
+; CHECK-RV32-NEXT:    li a4, 283
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_811
+; CHECK-RV32-NEXT:    j .LBB61_302
+; CHECK-RV32-NEXT:  .LBB61_811: # %cond.load1133
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 285
+; CHECK-RV32-NEXT:    li a4, 284
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1033
+; CHECK-RV32-NEXT:    j .LBB61_303
+; CHECK-RV32-NEXT:  .LBB61_1033: # %cond.load1133
+; CHECK-RV32-NEXT:    j .LBB61_304
+; CHECK-RV32-NEXT:  .LBB61_812: # %cond.load1145
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 288
+; CHECK-RV32-NEXT:    li a4, 287
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_813
+; CHECK-RV32-NEXT:    j .LBB61_308
+; CHECK-RV32-NEXT:  .LBB61_813: # %cond.load1149
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 289
+; CHECK-RV32-NEXT:    li a4, 288
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_814
+; CHECK-RV32-NEXT:    j .LBB61_309
+; CHECK-RV32-NEXT:  .LBB61_814: # %cond.load1153
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 290
+; CHECK-RV32-NEXT:    li a4, 289
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_815
+; CHECK-RV32-NEXT:    j .LBB61_310
+; CHECK-RV32-NEXT:  .LBB61_815: # %cond.load1157
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 291
+; CHECK-RV32-NEXT:    li a4, 290
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_816
+; CHECK-RV32-NEXT:    j .LBB61_311
+; CHECK-RV32-NEXT:  .LBB61_816: # %cond.load1161
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 292
+; CHECK-RV32-NEXT:    li a4, 291
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_817
+; CHECK-RV32-NEXT:    j .LBB61_312
+; CHECK-RV32-NEXT:  .LBB61_817: # %cond.load1165
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 293
+; CHECK-RV32-NEXT:    li a4, 292
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_818
+; CHECK-RV32-NEXT:    j .LBB61_313
+; CHECK-RV32-NEXT:  .LBB61_818: # %cond.load1169
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 294
+; CHECK-RV32-NEXT:    li a4, 293
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_819
+; CHECK-RV32-NEXT:    j .LBB61_314
+; CHECK-RV32-NEXT:  .LBB61_819: # %cond.load1173
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 295
+; CHECK-RV32-NEXT:    li a4, 294
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_820
+; CHECK-RV32-NEXT:    j .LBB61_315
+; CHECK-RV32-NEXT:  .LBB61_820: # %cond.load1177
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 296
+; CHECK-RV32-NEXT:    li a4, 295
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_821
+; CHECK-RV32-NEXT:    j .LBB61_316
+; CHECK-RV32-NEXT:  .LBB61_821: # %cond.load1181
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 297
+; CHECK-RV32-NEXT:    li a4, 296
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_822
+; CHECK-RV32-NEXT:    j .LBB61_317
+; CHECK-RV32-NEXT:  .LBB61_822: # %cond.load1185
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 298
+; CHECK-RV32-NEXT:    li a4, 297
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_823
+; CHECK-RV32-NEXT:    j .LBB61_318
+; CHECK-RV32-NEXT:  .LBB61_823: # %cond.load1189
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 299
+; CHECK-RV32-NEXT:    li a4, 298
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_824
+; CHECK-RV32-NEXT:    j .LBB61_319
+; CHECK-RV32-NEXT:  .LBB61_824: # %cond.load1193
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 300
+; CHECK-RV32-NEXT:    li a4, 299
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_825
+; CHECK-RV32-NEXT:    j .LBB61_320
+; CHECK-RV32-NEXT:  .LBB61_825: # %cond.load1197
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 301
+; CHECK-RV32-NEXT:    li a4, 300
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_826
+; CHECK-RV32-NEXT:    j .LBB61_321
+; CHECK-RV32-NEXT:  .LBB61_826: # %cond.load1201
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 302
+; CHECK-RV32-NEXT:    li a4, 301
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_827
+; CHECK-RV32-NEXT:    j .LBB61_322
+; CHECK-RV32-NEXT:  .LBB61_827: # %cond.load1205
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 303
+; CHECK-RV32-NEXT:    li a4, 302
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_828
+; CHECK-RV32-NEXT:    j .LBB61_323
+; CHECK-RV32-NEXT:  .LBB61_828: # %cond.load1209
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 304
+; CHECK-RV32-NEXT:    li a4, 303
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_829
+; CHECK-RV32-NEXT:    j .LBB61_324
+; CHECK-RV32-NEXT:  .LBB61_829: # %cond.load1213
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 305
+; CHECK-RV32-NEXT:    li a4, 304
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_830
+; CHECK-RV32-NEXT:    j .LBB61_325
+; CHECK-RV32-NEXT:  .LBB61_830: # %cond.load1217
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 306
+; CHECK-RV32-NEXT:    li a4, 305
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_831
+; CHECK-RV32-NEXT:    j .LBB61_326
+; CHECK-RV32-NEXT:  .LBB61_831: # %cond.load1221
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 307
+; CHECK-RV32-NEXT:    li a4, 306
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_832
+; CHECK-RV32-NEXT:    j .LBB61_327
+; CHECK-RV32-NEXT:  .LBB61_832: # %cond.load1225
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 308
+; CHECK-RV32-NEXT:    li a4, 307
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_833
+; CHECK-RV32-NEXT:    j .LBB61_328
+; CHECK-RV32-NEXT:  .LBB61_833: # %cond.load1229
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 309
+; CHECK-RV32-NEXT:    li a4, 308
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_834
+; CHECK-RV32-NEXT:    j .LBB61_329
+; CHECK-RV32-NEXT:  .LBB61_834: # %cond.load1233
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 310
+; CHECK-RV32-NEXT:    li a4, 309
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_835
+; CHECK-RV32-NEXT:    j .LBB61_330
+; CHECK-RV32-NEXT:  .LBB61_835: # %cond.load1237
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 311
+; CHECK-RV32-NEXT:    li a4, 310
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_836
+; CHECK-RV32-NEXT:    j .LBB61_331
+; CHECK-RV32-NEXT:  .LBB61_836: # %cond.load1241
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 312
+; CHECK-RV32-NEXT:    li a4, 311
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_837
+; CHECK-RV32-NEXT:    j .LBB61_332
+; CHECK-RV32-NEXT:  .LBB61_837: # %cond.load1245
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 313
+; CHECK-RV32-NEXT:    li a4, 312
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_838
+; CHECK-RV32-NEXT:    j .LBB61_333
+; CHECK-RV32-NEXT:  .LBB61_838: # %cond.load1249
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 314
+; CHECK-RV32-NEXT:    li a4, 313
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_839
+; CHECK-RV32-NEXT:    j .LBB61_334
+; CHECK-RV32-NEXT:  .LBB61_839: # %cond.load1253
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 315
+; CHECK-RV32-NEXT:    li a4, 314
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_840
+; CHECK-RV32-NEXT:    j .LBB61_335
+; CHECK-RV32-NEXT:  .LBB61_840: # %cond.load1257
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 316
+; CHECK-RV32-NEXT:    li a4, 315
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_841
+; CHECK-RV32-NEXT:    j .LBB61_336
+; CHECK-RV32-NEXT:  .LBB61_841: # %cond.load1261
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 317
+; CHECK-RV32-NEXT:    li a4, 316
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1034
+; CHECK-RV32-NEXT:    j .LBB61_337
+; CHECK-RV32-NEXT:  .LBB61_1034: # %cond.load1261
+; CHECK-RV32-NEXT:    j .LBB61_338
+; CHECK-RV32-NEXT:  .LBB61_842: # %cond.load1273
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 320
+; CHECK-RV32-NEXT:    li a4, 319
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_843
+; CHECK-RV32-NEXT:    j .LBB61_342
+; CHECK-RV32-NEXT:  .LBB61_843: # %cond.load1277
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 321
+; CHECK-RV32-NEXT:    li a4, 320
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_844
+; CHECK-RV32-NEXT:    j .LBB61_343
+; CHECK-RV32-NEXT:  .LBB61_844: # %cond.load1281
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 322
+; CHECK-RV32-NEXT:    li a4, 321
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_845
+; CHECK-RV32-NEXT:    j .LBB61_344
+; CHECK-RV32-NEXT:  .LBB61_845: # %cond.load1285
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 323
+; CHECK-RV32-NEXT:    li a4, 322
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_846
+; CHECK-RV32-NEXT:    j .LBB61_345
+; CHECK-RV32-NEXT:  .LBB61_846: # %cond.load1289
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 324
+; CHECK-RV32-NEXT:    li a4, 323
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_847
+; CHECK-RV32-NEXT:    j .LBB61_346
+; CHECK-RV32-NEXT:  .LBB61_847: # %cond.load1293
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 325
+; CHECK-RV32-NEXT:    li a4, 324
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_848
+; CHECK-RV32-NEXT:    j .LBB61_347
+; CHECK-RV32-NEXT:  .LBB61_848: # %cond.load1297
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 326
+; CHECK-RV32-NEXT:    li a4, 325
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_849
+; CHECK-RV32-NEXT:    j .LBB61_348
+; CHECK-RV32-NEXT:  .LBB61_849: # %cond.load1301
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 327
+; CHECK-RV32-NEXT:    li a4, 326
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_850
+; CHECK-RV32-NEXT:    j .LBB61_349
+; CHECK-RV32-NEXT:  .LBB61_850: # %cond.load1305
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 328
+; CHECK-RV32-NEXT:    li a4, 327
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_851
+; CHECK-RV32-NEXT:    j .LBB61_350
+; CHECK-RV32-NEXT:  .LBB61_851: # %cond.load1309
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 329
+; CHECK-RV32-NEXT:    li a4, 328
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_852
+; CHECK-RV32-NEXT:    j .LBB61_351
+; CHECK-RV32-NEXT:  .LBB61_852: # %cond.load1313
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 330
+; CHECK-RV32-NEXT:    li a4, 329
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_853
+; CHECK-RV32-NEXT:    j .LBB61_352
+; CHECK-RV32-NEXT:  .LBB61_853: # %cond.load1317
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 331
+; CHECK-RV32-NEXT:    li a4, 330
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_854
+; CHECK-RV32-NEXT:    j .LBB61_353
+; CHECK-RV32-NEXT:  .LBB61_854: # %cond.load1321
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 332
+; CHECK-RV32-NEXT:    li a4, 331
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_855
+; CHECK-RV32-NEXT:    j .LBB61_354
+; CHECK-RV32-NEXT:  .LBB61_855: # %cond.load1325
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 333
+; CHECK-RV32-NEXT:    li a4, 332
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_856
+; CHECK-RV32-NEXT:    j .LBB61_355
+; CHECK-RV32-NEXT:  .LBB61_856: # %cond.load1329
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 334
+; CHECK-RV32-NEXT:    li a4, 333
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_857
+; CHECK-RV32-NEXT:    j .LBB61_356
+; CHECK-RV32-NEXT:  .LBB61_857: # %cond.load1333
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 335
+; CHECK-RV32-NEXT:    li a4, 334
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_858
+; CHECK-RV32-NEXT:    j .LBB61_357
+; CHECK-RV32-NEXT:  .LBB61_858: # %cond.load1337
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 336
+; CHECK-RV32-NEXT:    li a4, 335
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_859
+; CHECK-RV32-NEXT:    j .LBB61_358
+; CHECK-RV32-NEXT:  .LBB61_859: # %cond.load1341
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 337
+; CHECK-RV32-NEXT:    li a4, 336
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_860
+; CHECK-RV32-NEXT:    j .LBB61_359
+; CHECK-RV32-NEXT:  .LBB61_860: # %cond.load1345
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 338
+; CHECK-RV32-NEXT:    li a4, 337
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_861
+; CHECK-RV32-NEXT:    j .LBB61_360
+; CHECK-RV32-NEXT:  .LBB61_861: # %cond.load1349
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 339
+; CHECK-RV32-NEXT:    li a4, 338
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_862
+; CHECK-RV32-NEXT:    j .LBB61_361
+; CHECK-RV32-NEXT:  .LBB61_862: # %cond.load1353
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 340
+; CHECK-RV32-NEXT:    li a4, 339
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_863
+; CHECK-RV32-NEXT:    j .LBB61_362
+; CHECK-RV32-NEXT:  .LBB61_863: # %cond.load1357
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 341
+; CHECK-RV32-NEXT:    li a4, 340
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_864
+; CHECK-RV32-NEXT:    j .LBB61_363
+; CHECK-RV32-NEXT:  .LBB61_864: # %cond.load1361
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 342
+; CHECK-RV32-NEXT:    li a4, 341
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_865
+; CHECK-RV32-NEXT:    j .LBB61_364
+; CHECK-RV32-NEXT:  .LBB61_865: # %cond.load1365
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 343
+; CHECK-RV32-NEXT:    li a4, 342
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_866
+; CHECK-RV32-NEXT:    j .LBB61_365
+; CHECK-RV32-NEXT:  .LBB61_866: # %cond.load1369
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 344
+; CHECK-RV32-NEXT:    li a4, 343
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_867
+; CHECK-RV32-NEXT:    j .LBB61_366
+; CHECK-RV32-NEXT:  .LBB61_867: # %cond.load1373
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 345
+; CHECK-RV32-NEXT:    li a4, 344
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_868
+; CHECK-RV32-NEXT:    j .LBB61_367
+; CHECK-RV32-NEXT:  .LBB61_868: # %cond.load1377
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 346
+; CHECK-RV32-NEXT:    li a4, 345
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_869
+; CHECK-RV32-NEXT:    j .LBB61_368
+; CHECK-RV32-NEXT:  .LBB61_869: # %cond.load1381
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 347
+; CHECK-RV32-NEXT:    li a4, 346
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_870
+; CHECK-RV32-NEXT:    j .LBB61_369
+; CHECK-RV32-NEXT:  .LBB61_870: # %cond.load1385
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 348
+; CHECK-RV32-NEXT:    li a4, 347
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_871
+; CHECK-RV32-NEXT:    j .LBB61_370
+; CHECK-RV32-NEXT:  .LBB61_871: # %cond.load1389
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 349
+; CHECK-RV32-NEXT:    li a4, 348
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1035
+; CHECK-RV32-NEXT:    j .LBB61_371
+; CHECK-RV32-NEXT:  .LBB61_1035: # %cond.load1389
+; CHECK-RV32-NEXT:    j .LBB61_372
+; CHECK-RV32-NEXT:  .LBB61_872: # %cond.load1401
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 352
+; CHECK-RV32-NEXT:    li a4, 351
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_873
+; CHECK-RV32-NEXT:    j .LBB61_376
+; CHECK-RV32-NEXT:  .LBB61_873: # %cond.load1405
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 353
+; CHECK-RV32-NEXT:    li a4, 352
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_874
+; CHECK-RV32-NEXT:    j .LBB61_377
+; CHECK-RV32-NEXT:  .LBB61_874: # %cond.load1409
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 354
+; CHECK-RV32-NEXT:    li a4, 353
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_875
+; CHECK-RV32-NEXT:    j .LBB61_378
+; CHECK-RV32-NEXT:  .LBB61_875: # %cond.load1413
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 355
+; CHECK-RV32-NEXT:    li a4, 354
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_876
+; CHECK-RV32-NEXT:    j .LBB61_379
+; CHECK-RV32-NEXT:  .LBB61_876: # %cond.load1417
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 356
+; CHECK-RV32-NEXT:    li a4, 355
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_877
+; CHECK-RV32-NEXT:    j .LBB61_380
+; CHECK-RV32-NEXT:  .LBB61_877: # %cond.load1421
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 357
+; CHECK-RV32-NEXT:    li a4, 356
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_878
+; CHECK-RV32-NEXT:    j .LBB61_381
+; CHECK-RV32-NEXT:  .LBB61_878: # %cond.load1425
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 358
+; CHECK-RV32-NEXT:    li a4, 357
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_879
+; CHECK-RV32-NEXT:    j .LBB61_382
+; CHECK-RV32-NEXT:  .LBB61_879: # %cond.load1429
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 359
+; CHECK-RV32-NEXT:    li a4, 358
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_880
+; CHECK-RV32-NEXT:    j .LBB61_383
+; CHECK-RV32-NEXT:  .LBB61_880: # %cond.load1433
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 360
+; CHECK-RV32-NEXT:    li a4, 359
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_881
+; CHECK-RV32-NEXT:    j .LBB61_384
+; CHECK-RV32-NEXT:  .LBB61_881: # %cond.load1437
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 361
+; CHECK-RV32-NEXT:    li a4, 360
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_882
+; CHECK-RV32-NEXT:    j .LBB61_385
+; CHECK-RV32-NEXT:  .LBB61_882: # %cond.load1441
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 362
+; CHECK-RV32-NEXT:    li a4, 361
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_883
+; CHECK-RV32-NEXT:    j .LBB61_386
+; CHECK-RV32-NEXT:  .LBB61_883: # %cond.load1445
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 363
+; CHECK-RV32-NEXT:    li a4, 362
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_884
+; CHECK-RV32-NEXT:    j .LBB61_387
+; CHECK-RV32-NEXT:  .LBB61_884: # %cond.load1449
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 364
+; CHECK-RV32-NEXT:    li a4, 363
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_885
+; CHECK-RV32-NEXT:    j .LBB61_388
+; CHECK-RV32-NEXT:  .LBB61_885: # %cond.load1453
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 365
+; CHECK-RV32-NEXT:    li a4, 364
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_886
+; CHECK-RV32-NEXT:    j .LBB61_389
+; CHECK-RV32-NEXT:  .LBB61_886: # %cond.load1457
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 366
+; CHECK-RV32-NEXT:    li a4, 365
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_887
+; CHECK-RV32-NEXT:    j .LBB61_390
+; CHECK-RV32-NEXT:  .LBB61_887: # %cond.load1461
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 367
+; CHECK-RV32-NEXT:    li a4, 366
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_888
+; CHECK-RV32-NEXT:    j .LBB61_391
+; CHECK-RV32-NEXT:  .LBB61_888: # %cond.load1465
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 368
+; CHECK-RV32-NEXT:    li a4, 367
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_889
+; CHECK-RV32-NEXT:    j .LBB61_392
+; CHECK-RV32-NEXT:  .LBB61_889: # %cond.load1469
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 369
+; CHECK-RV32-NEXT:    li a4, 368
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_890
+; CHECK-RV32-NEXT:    j .LBB61_393
+; CHECK-RV32-NEXT:  .LBB61_890: # %cond.load1473
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 370
+; CHECK-RV32-NEXT:    li a4, 369
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_891
+; CHECK-RV32-NEXT:    j .LBB61_394
+; CHECK-RV32-NEXT:  .LBB61_891: # %cond.load1477
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 371
+; CHECK-RV32-NEXT:    li a4, 370
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_892
+; CHECK-RV32-NEXT:    j .LBB61_395
+; CHECK-RV32-NEXT:  .LBB61_892: # %cond.load1481
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 372
+; CHECK-RV32-NEXT:    li a4, 371
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_893
+; CHECK-RV32-NEXT:    j .LBB61_396
+; CHECK-RV32-NEXT:  .LBB61_893: # %cond.load1485
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 373
+; CHECK-RV32-NEXT:    li a4, 372
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_894
+; CHECK-RV32-NEXT:    j .LBB61_397
+; CHECK-RV32-NEXT:  .LBB61_894: # %cond.load1489
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 374
+; CHECK-RV32-NEXT:    li a4, 373
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_895
+; CHECK-RV32-NEXT:    j .LBB61_398
+; CHECK-RV32-NEXT:  .LBB61_895: # %cond.load1493
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 375
+; CHECK-RV32-NEXT:    li a4, 374
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_896
+; CHECK-RV32-NEXT:    j .LBB61_399
+; CHECK-RV32-NEXT:  .LBB61_896: # %cond.load1497
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 376
+; CHECK-RV32-NEXT:    li a4, 375
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_897
+; CHECK-RV32-NEXT:    j .LBB61_400
+; CHECK-RV32-NEXT:  .LBB61_897: # %cond.load1501
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 377
+; CHECK-RV32-NEXT:    li a4, 376
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_898
+; CHECK-RV32-NEXT:    j .LBB61_401
+; CHECK-RV32-NEXT:  .LBB61_898: # %cond.load1505
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 378
+; CHECK-RV32-NEXT:    li a4, 377
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_899
+; CHECK-RV32-NEXT:    j .LBB61_402
+; CHECK-RV32-NEXT:  .LBB61_899: # %cond.load1509
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 379
+; CHECK-RV32-NEXT:    li a4, 378
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_900
+; CHECK-RV32-NEXT:    j .LBB61_403
+; CHECK-RV32-NEXT:  .LBB61_900: # %cond.load1513
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 380
+; CHECK-RV32-NEXT:    li a4, 379
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_901
+; CHECK-RV32-NEXT:    j .LBB61_404
+; CHECK-RV32-NEXT:  .LBB61_901: # %cond.load1517
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 381
+; CHECK-RV32-NEXT:    li a4, 380
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1036
+; CHECK-RV32-NEXT:    j .LBB61_405
+; CHECK-RV32-NEXT:  .LBB61_1036: # %cond.load1517
+; CHECK-RV32-NEXT:    j .LBB61_406
+; CHECK-RV32-NEXT:  .LBB61_902: # %cond.load1529
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 384
+; CHECK-RV32-NEXT:    li a4, 383
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_903
+; CHECK-RV32-NEXT:    j .LBB61_410
+; CHECK-RV32-NEXT:  .LBB61_903: # %cond.load1533
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 385
+; CHECK-RV32-NEXT:    li a4, 384
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_904
+; CHECK-RV32-NEXT:    j .LBB61_411
+; CHECK-RV32-NEXT:  .LBB61_904: # %cond.load1537
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 386
+; CHECK-RV32-NEXT:    li a4, 385
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_905
+; CHECK-RV32-NEXT:    j .LBB61_412
+; CHECK-RV32-NEXT:  .LBB61_905: # %cond.load1541
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 387
+; CHECK-RV32-NEXT:    li a4, 386
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_906
+; CHECK-RV32-NEXT:    j .LBB61_413
+; CHECK-RV32-NEXT:  .LBB61_906: # %cond.load1545
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 388
+; CHECK-RV32-NEXT:    li a4, 387
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_907
+; CHECK-RV32-NEXT:    j .LBB61_414
+; CHECK-RV32-NEXT:  .LBB61_907: # %cond.load1549
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 389
+; CHECK-RV32-NEXT:    li a4, 388
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_908
+; CHECK-RV32-NEXT:    j .LBB61_415
+; CHECK-RV32-NEXT:  .LBB61_908: # %cond.load1553
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 390
+; CHECK-RV32-NEXT:    li a4, 389
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_909
+; CHECK-RV32-NEXT:    j .LBB61_416
+; CHECK-RV32-NEXT:  .LBB61_909: # %cond.load1557
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 391
+; CHECK-RV32-NEXT:    li a4, 390
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_910
+; CHECK-RV32-NEXT:    j .LBB61_417
+; CHECK-RV32-NEXT:  .LBB61_910: # %cond.load1561
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 392
+; CHECK-RV32-NEXT:    li a4, 391
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_911
+; CHECK-RV32-NEXT:    j .LBB61_418
+; CHECK-RV32-NEXT:  .LBB61_911: # %cond.load1565
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 393
+; CHECK-RV32-NEXT:    li a4, 392
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_912
+; CHECK-RV32-NEXT:    j .LBB61_419
+; CHECK-RV32-NEXT:  .LBB61_912: # %cond.load1569
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 394
+; CHECK-RV32-NEXT:    li a4, 393
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_913
+; CHECK-RV32-NEXT:    j .LBB61_420
+; CHECK-RV32-NEXT:  .LBB61_913: # %cond.load1573
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 395
+; CHECK-RV32-NEXT:    li a4, 394
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_914
+; CHECK-RV32-NEXT:    j .LBB61_421
+; CHECK-RV32-NEXT:  .LBB61_914: # %cond.load1577
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 396
+; CHECK-RV32-NEXT:    li a4, 395
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_915
+; CHECK-RV32-NEXT:    j .LBB61_422
+; CHECK-RV32-NEXT:  .LBB61_915: # %cond.load1581
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 397
+; CHECK-RV32-NEXT:    li a4, 396
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_916
+; CHECK-RV32-NEXT:    j .LBB61_423
+; CHECK-RV32-NEXT:  .LBB61_916: # %cond.load1585
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 398
+; CHECK-RV32-NEXT:    li a4, 397
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_917
+; CHECK-RV32-NEXT:    j .LBB61_424
+; CHECK-RV32-NEXT:  .LBB61_917: # %cond.load1589
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 399
+; CHECK-RV32-NEXT:    li a4, 398
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_918
+; CHECK-RV32-NEXT:    j .LBB61_425
+; CHECK-RV32-NEXT:  .LBB61_918: # %cond.load1593
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 400
+; CHECK-RV32-NEXT:    li a4, 399
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_919
+; CHECK-RV32-NEXT:    j .LBB61_426
+; CHECK-RV32-NEXT:  .LBB61_919: # %cond.load1597
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 401
+; CHECK-RV32-NEXT:    li a4, 400
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_920
+; CHECK-RV32-NEXT:    j .LBB61_427
+; CHECK-RV32-NEXT:  .LBB61_920: # %cond.load1601
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 402
+; CHECK-RV32-NEXT:    li a4, 401
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_921
+; CHECK-RV32-NEXT:    j .LBB61_428
+; CHECK-RV32-NEXT:  .LBB61_921: # %cond.load1605
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 403
+; CHECK-RV32-NEXT:    li a4, 402
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_922
+; CHECK-RV32-NEXT:    j .LBB61_429
+; CHECK-RV32-NEXT:  .LBB61_922: # %cond.load1609
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 404
+; CHECK-RV32-NEXT:    li a4, 403
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_923
+; CHECK-RV32-NEXT:    j .LBB61_430
+; CHECK-RV32-NEXT:  .LBB61_923: # %cond.load1613
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 405
+; CHECK-RV32-NEXT:    li a4, 404
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_924
+; CHECK-RV32-NEXT:    j .LBB61_431
+; CHECK-RV32-NEXT:  .LBB61_924: # %cond.load1617
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 406
+; CHECK-RV32-NEXT:    li a4, 405
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_925
+; CHECK-RV32-NEXT:    j .LBB61_432
+; CHECK-RV32-NEXT:  .LBB61_925: # %cond.load1621
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 407
+; CHECK-RV32-NEXT:    li a4, 406
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_926
+; CHECK-RV32-NEXT:    j .LBB61_433
+; CHECK-RV32-NEXT:  .LBB61_926: # %cond.load1625
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 408
+; CHECK-RV32-NEXT:    li a4, 407
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_927
+; CHECK-RV32-NEXT:    j .LBB61_434
+; CHECK-RV32-NEXT:  .LBB61_927: # %cond.load1629
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 409
+; CHECK-RV32-NEXT:    li a4, 408
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_928
+; CHECK-RV32-NEXT:    j .LBB61_435
+; CHECK-RV32-NEXT:  .LBB61_928: # %cond.load1633
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 410
+; CHECK-RV32-NEXT:    li a4, 409
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_929
+; CHECK-RV32-NEXT:    j .LBB61_436
+; CHECK-RV32-NEXT:  .LBB61_929: # %cond.load1637
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 411
+; CHECK-RV32-NEXT:    li a4, 410
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_930
+; CHECK-RV32-NEXT:    j .LBB61_437
+; CHECK-RV32-NEXT:  .LBB61_930: # %cond.load1641
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 412
+; CHECK-RV32-NEXT:    li a4, 411
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_931
+; CHECK-RV32-NEXT:    j .LBB61_438
+; CHECK-RV32-NEXT:  .LBB61_931: # %cond.load1645
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 413
+; CHECK-RV32-NEXT:    li a4, 412
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1037
+; CHECK-RV32-NEXT:    j .LBB61_439
+; CHECK-RV32-NEXT:  .LBB61_1037: # %cond.load1645
+; CHECK-RV32-NEXT:    j .LBB61_440
+; CHECK-RV32-NEXT:  .LBB61_932: # %cond.load1657
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 416
+; CHECK-RV32-NEXT:    li a4, 415
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_933
+; CHECK-RV32-NEXT:    j .LBB61_444
+; CHECK-RV32-NEXT:  .LBB61_933: # %cond.load1661
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 417
+; CHECK-RV32-NEXT:    li a4, 416
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_934
+; CHECK-RV32-NEXT:    j .LBB61_445
+; CHECK-RV32-NEXT:  .LBB61_934: # %cond.load1665
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 418
+; CHECK-RV32-NEXT:    li a4, 417
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_935
+; CHECK-RV32-NEXT:    j .LBB61_446
+; CHECK-RV32-NEXT:  .LBB61_935: # %cond.load1669
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 419
+; CHECK-RV32-NEXT:    li a4, 418
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_936
+; CHECK-RV32-NEXT:    j .LBB61_447
+; CHECK-RV32-NEXT:  .LBB61_936: # %cond.load1673
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 420
+; CHECK-RV32-NEXT:    li a4, 419
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_937
+; CHECK-RV32-NEXT:    j .LBB61_448
+; CHECK-RV32-NEXT:  .LBB61_937: # %cond.load1677
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 421
+; CHECK-RV32-NEXT:    li a4, 420
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_938
+; CHECK-RV32-NEXT:    j .LBB61_449
+; CHECK-RV32-NEXT:  .LBB61_938: # %cond.load1681
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 422
+; CHECK-RV32-NEXT:    li a4, 421
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_939
+; CHECK-RV32-NEXT:    j .LBB61_450
+; CHECK-RV32-NEXT:  .LBB61_939: # %cond.load1685
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 423
+; CHECK-RV32-NEXT:    li a4, 422
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_940
+; CHECK-RV32-NEXT:    j .LBB61_451
+; CHECK-RV32-NEXT:  .LBB61_940: # %cond.load1689
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 424
+; CHECK-RV32-NEXT:    li a4, 423
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_941
+; CHECK-RV32-NEXT:    j .LBB61_452
+; CHECK-RV32-NEXT:  .LBB61_941: # %cond.load1693
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 425
+; CHECK-RV32-NEXT:    li a4, 424
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_942
+; CHECK-RV32-NEXT:    j .LBB61_453
+; CHECK-RV32-NEXT:  .LBB61_942: # %cond.load1697
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 426
+; CHECK-RV32-NEXT:    li a4, 425
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_943
+; CHECK-RV32-NEXT:    j .LBB61_454
+; CHECK-RV32-NEXT:  .LBB61_943: # %cond.load1701
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 427
+; CHECK-RV32-NEXT:    li a4, 426
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_944
+; CHECK-RV32-NEXT:    j .LBB61_455
+; CHECK-RV32-NEXT:  .LBB61_944: # %cond.load1705
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 428
+; CHECK-RV32-NEXT:    li a4, 427
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_945
+; CHECK-RV32-NEXT:    j .LBB61_456
+; CHECK-RV32-NEXT:  .LBB61_945: # %cond.load1709
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 429
+; CHECK-RV32-NEXT:    li a4, 428
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_946
+; CHECK-RV32-NEXT:    j .LBB61_457
+; CHECK-RV32-NEXT:  .LBB61_946: # %cond.load1713
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 430
+; CHECK-RV32-NEXT:    li a4, 429
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_947
+; CHECK-RV32-NEXT:    j .LBB61_458
+; CHECK-RV32-NEXT:  .LBB61_947: # %cond.load1717
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 431
+; CHECK-RV32-NEXT:    li a4, 430
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_948
+; CHECK-RV32-NEXT:    j .LBB61_459
+; CHECK-RV32-NEXT:  .LBB61_948: # %cond.load1721
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 432
+; CHECK-RV32-NEXT:    li a4, 431
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_949
+; CHECK-RV32-NEXT:    j .LBB61_460
+; CHECK-RV32-NEXT:  .LBB61_949: # %cond.load1725
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 433
+; CHECK-RV32-NEXT:    li a4, 432
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_950
+; CHECK-RV32-NEXT:    j .LBB61_461
+; CHECK-RV32-NEXT:  .LBB61_950: # %cond.load1729
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 434
+; CHECK-RV32-NEXT:    li a4, 433
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_951
+; CHECK-RV32-NEXT:    j .LBB61_462
+; CHECK-RV32-NEXT:  .LBB61_951: # %cond.load1733
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 435
+; CHECK-RV32-NEXT:    li a4, 434
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_952
+; CHECK-RV32-NEXT:    j .LBB61_463
+; CHECK-RV32-NEXT:  .LBB61_952: # %cond.load1737
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 436
+; CHECK-RV32-NEXT:    li a4, 435
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_953
+; CHECK-RV32-NEXT:    j .LBB61_464
+; CHECK-RV32-NEXT:  .LBB61_953: # %cond.load1741
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 437
+; CHECK-RV32-NEXT:    li a4, 436
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_954
+; CHECK-RV32-NEXT:    j .LBB61_465
+; CHECK-RV32-NEXT:  .LBB61_954: # %cond.load1745
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 438
+; CHECK-RV32-NEXT:    li a4, 437
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_955
+; CHECK-RV32-NEXT:    j .LBB61_466
+; CHECK-RV32-NEXT:  .LBB61_955: # %cond.load1749
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 439
+; CHECK-RV32-NEXT:    li a4, 438
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_956
+; CHECK-RV32-NEXT:    j .LBB61_467
+; CHECK-RV32-NEXT:  .LBB61_956: # %cond.load1753
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 440
+; CHECK-RV32-NEXT:    li a4, 439
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_957
+; CHECK-RV32-NEXT:    j .LBB61_468
+; CHECK-RV32-NEXT:  .LBB61_957: # %cond.load1757
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 441
+; CHECK-RV32-NEXT:    li a4, 440
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_958
+; CHECK-RV32-NEXT:    j .LBB61_469
+; CHECK-RV32-NEXT:  .LBB61_958: # %cond.load1761
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 442
+; CHECK-RV32-NEXT:    li a4, 441
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_959
+; CHECK-RV32-NEXT:    j .LBB61_470
+; CHECK-RV32-NEXT:  .LBB61_959: # %cond.load1765
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 443
+; CHECK-RV32-NEXT:    li a4, 442
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_960
+; CHECK-RV32-NEXT:    j .LBB61_471
+; CHECK-RV32-NEXT:  .LBB61_960: # %cond.load1769
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 444
+; CHECK-RV32-NEXT:    li a4, 443
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_961
+; CHECK-RV32-NEXT:    j .LBB61_472
+; CHECK-RV32-NEXT:  .LBB61_961: # %cond.load1773
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 445
+; CHECK-RV32-NEXT:    li a4, 444
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1038
+; CHECK-RV32-NEXT:    j .LBB61_473
+; CHECK-RV32-NEXT:  .LBB61_1038: # %cond.load1773
+; CHECK-RV32-NEXT:    j .LBB61_474
+; CHECK-RV32-NEXT:  .LBB61_962: # %cond.load1785
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 448
+; CHECK-RV32-NEXT:    li a4, 447
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_963
+; CHECK-RV32-NEXT:    j .LBB61_478
+; CHECK-RV32-NEXT:  .LBB61_963: # %cond.load1789
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 449
+; CHECK-RV32-NEXT:    li a4, 448
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_964
+; CHECK-RV32-NEXT:    j .LBB61_479
+; CHECK-RV32-NEXT:  .LBB61_964: # %cond.load1793
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 450
+; CHECK-RV32-NEXT:    li a4, 449
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_965
+; CHECK-RV32-NEXT:    j .LBB61_480
+; CHECK-RV32-NEXT:  .LBB61_965: # %cond.load1797
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 451
+; CHECK-RV32-NEXT:    li a4, 450
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_966
+; CHECK-RV32-NEXT:    j .LBB61_481
+; CHECK-RV32-NEXT:  .LBB61_966: # %cond.load1801
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 452
+; CHECK-RV32-NEXT:    li a4, 451
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_967
+; CHECK-RV32-NEXT:    j .LBB61_482
+; CHECK-RV32-NEXT:  .LBB61_967: # %cond.load1805
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 453
+; CHECK-RV32-NEXT:    li a4, 452
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_968
+; CHECK-RV32-NEXT:    j .LBB61_483
+; CHECK-RV32-NEXT:  .LBB61_968: # %cond.load1809
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 454
+; CHECK-RV32-NEXT:    li a4, 453
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_969
+; CHECK-RV32-NEXT:    j .LBB61_484
+; CHECK-RV32-NEXT:  .LBB61_969: # %cond.load1813
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 455
+; CHECK-RV32-NEXT:    li a4, 454
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_970
+; CHECK-RV32-NEXT:    j .LBB61_485
+; CHECK-RV32-NEXT:  .LBB61_970: # %cond.load1817
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 456
+; CHECK-RV32-NEXT:    li a4, 455
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_971
+; CHECK-RV32-NEXT:    j .LBB61_486
+; CHECK-RV32-NEXT:  .LBB61_971: # %cond.load1821
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 457
+; CHECK-RV32-NEXT:    li a4, 456
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_972
+; CHECK-RV32-NEXT:    j .LBB61_487
+; CHECK-RV32-NEXT:  .LBB61_972: # %cond.load1825
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 458
+; CHECK-RV32-NEXT:    li a4, 457
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_973
+; CHECK-RV32-NEXT:    j .LBB61_488
+; CHECK-RV32-NEXT:  .LBB61_973: # %cond.load1829
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 459
+; CHECK-RV32-NEXT:    li a4, 458
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_974
+; CHECK-RV32-NEXT:    j .LBB61_489
+; CHECK-RV32-NEXT:  .LBB61_974: # %cond.load1833
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 460
+; CHECK-RV32-NEXT:    li a4, 459
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_975
+; CHECK-RV32-NEXT:    j .LBB61_490
+; CHECK-RV32-NEXT:  .LBB61_975: # %cond.load1837
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 461
+; CHECK-RV32-NEXT:    li a4, 460
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_976
+; CHECK-RV32-NEXT:    j .LBB61_491
+; CHECK-RV32-NEXT:  .LBB61_976: # %cond.load1841
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 462
+; CHECK-RV32-NEXT:    li a4, 461
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_977
+; CHECK-RV32-NEXT:    j .LBB61_492
+; CHECK-RV32-NEXT:  .LBB61_977: # %cond.load1845
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 463
+; CHECK-RV32-NEXT:    li a4, 462
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_978
+; CHECK-RV32-NEXT:    j .LBB61_493
+; CHECK-RV32-NEXT:  .LBB61_978: # %cond.load1849
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 464
+; CHECK-RV32-NEXT:    li a4, 463
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_979
+; CHECK-RV32-NEXT:    j .LBB61_494
+; CHECK-RV32-NEXT:  .LBB61_979: # %cond.load1853
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 465
+; CHECK-RV32-NEXT:    li a4, 464
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_980
+; CHECK-RV32-NEXT:    j .LBB61_495
+; CHECK-RV32-NEXT:  .LBB61_980: # %cond.load1857
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 466
+; CHECK-RV32-NEXT:    li a4, 465
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_981
+; CHECK-RV32-NEXT:    j .LBB61_496
+; CHECK-RV32-NEXT:  .LBB61_981: # %cond.load1861
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 467
+; CHECK-RV32-NEXT:    li a4, 466
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_982
+; CHECK-RV32-NEXT:    j .LBB61_497
+; CHECK-RV32-NEXT:  .LBB61_982: # %cond.load1865
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 468
+; CHECK-RV32-NEXT:    li a4, 467
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_983
+; CHECK-RV32-NEXT:    j .LBB61_498
+; CHECK-RV32-NEXT:  .LBB61_983: # %cond.load1869
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 469
+; CHECK-RV32-NEXT:    li a4, 468
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_984
+; CHECK-RV32-NEXT:    j .LBB61_499
+; CHECK-RV32-NEXT:  .LBB61_984: # %cond.load1873
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 470
+; CHECK-RV32-NEXT:    li a4, 469
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_985
+; CHECK-RV32-NEXT:    j .LBB61_500
+; CHECK-RV32-NEXT:  .LBB61_985: # %cond.load1877
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 471
+; CHECK-RV32-NEXT:    li a4, 470
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_986
+; CHECK-RV32-NEXT:    j .LBB61_501
+; CHECK-RV32-NEXT:  .LBB61_986: # %cond.load1881
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 472
+; CHECK-RV32-NEXT:    li a4, 471
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_987
+; CHECK-RV32-NEXT:    j .LBB61_502
+; CHECK-RV32-NEXT:  .LBB61_987: # %cond.load1885
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 473
+; CHECK-RV32-NEXT:    li a4, 472
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_988
+; CHECK-RV32-NEXT:    j .LBB61_503
+; CHECK-RV32-NEXT:  .LBB61_988: # %cond.load1889
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 474
+; CHECK-RV32-NEXT:    li a4, 473
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_989
+; CHECK-RV32-NEXT:    j .LBB61_504
+; CHECK-RV32-NEXT:  .LBB61_989: # %cond.load1893
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 475
+; CHECK-RV32-NEXT:    li a4, 474
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_990
+; CHECK-RV32-NEXT:    j .LBB61_505
+; CHECK-RV32-NEXT:  .LBB61_990: # %cond.load1897
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 476
+; CHECK-RV32-NEXT:    li a4, 475
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_991
+; CHECK-RV32-NEXT:    j .LBB61_506
+; CHECK-RV32-NEXT:  .LBB61_991: # %cond.load1901
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 477
+; CHECK-RV32-NEXT:    li a4, 476
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1039
+; CHECK-RV32-NEXT:    j .LBB61_507
+; CHECK-RV32-NEXT:  .LBB61_1039: # %cond.load1901
+; CHECK-RV32-NEXT:    j .LBB61_508
+; CHECK-RV32-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 480
+; CHECK-RV32-NEXT:    li a3, 479
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_993
+; CHECK-RV32-NEXT:    j .LBB61_512
+; CHECK-RV32-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 481
+; CHECK-RV32-NEXT:    li a3, 480
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_994
+; CHECK-RV32-NEXT:    j .LBB61_513
+; CHECK-RV32-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 482
+; CHECK-RV32-NEXT:    li a3, 481
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_995
+; CHECK-RV32-NEXT:    j .LBB61_514
+; CHECK-RV32-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 483
+; CHECK-RV32-NEXT:    li a3, 482
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_996
+; CHECK-RV32-NEXT:    j .LBB61_515
+; CHECK-RV32-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 484
+; CHECK-RV32-NEXT:    li a3, 483
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_997
+; CHECK-RV32-NEXT:    j .LBB61_516
+; CHECK-RV32-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 485
+; CHECK-RV32-NEXT:    li a3, 484
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_998
+; CHECK-RV32-NEXT:    j .LBB61_517
+; CHECK-RV32-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 486
+; CHECK-RV32-NEXT:    li a3, 485
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_999
+; CHECK-RV32-NEXT:    j .LBB61_518
+; CHECK-RV32-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 487
+; CHECK-RV32-NEXT:    li a3, 486
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1000
+; CHECK-RV32-NEXT:    j .LBB61_519
+; CHECK-RV32-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 488
+; CHECK-RV32-NEXT:    li a3, 487
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1001
+; CHECK-RV32-NEXT:    j .LBB61_520
+; CHECK-RV32-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 489
+; CHECK-RV32-NEXT:    li a3, 488
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1002
+; CHECK-RV32-NEXT:    j .LBB61_521
+; CHECK-RV32-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 490
+; CHECK-RV32-NEXT:    li a3, 489
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1003
+; CHECK-RV32-NEXT:    j .LBB61_522
+; CHECK-RV32-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 491
+; CHECK-RV32-NEXT:    li a3, 490
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1004
+; CHECK-RV32-NEXT:    j .LBB61_523
+; CHECK-RV32-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 492
+; CHECK-RV32-NEXT:    li a3, 491
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1005
+; CHECK-RV32-NEXT:    j .LBB61_524
+; CHECK-RV32-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 493
+; CHECK-RV32-NEXT:    li a3, 492
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1006
+; CHECK-RV32-NEXT:    j .LBB61_525
+; CHECK-RV32-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 494
+; CHECK-RV32-NEXT:    li a3, 493
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1007
+; CHECK-RV32-NEXT:    j .LBB61_526
+; CHECK-RV32-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 495
+; CHECK-RV32-NEXT:    li a3, 494
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1008
+; CHECK-RV32-NEXT:    j .LBB61_527
+; CHECK-RV32-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 496
+; CHECK-RV32-NEXT:    li a3, 495
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1009
+; CHECK-RV32-NEXT:    j .LBB61_528
+; CHECK-RV32-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 497
+; CHECK-RV32-NEXT:    li a3, 496
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1010
+; CHECK-RV32-NEXT:    j .LBB61_529
+; CHECK-RV32-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 498
+; CHECK-RV32-NEXT:    li a3, 497
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1011
+; CHECK-RV32-NEXT:    j .LBB61_530
+; CHECK-RV32-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 499
+; CHECK-RV32-NEXT:    li a3, 498
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1012
+; CHECK-RV32-NEXT:    j .LBB61_531
+; CHECK-RV32-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 500
+; CHECK-RV32-NEXT:    li a3, 499
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1013
+; CHECK-RV32-NEXT:    j .LBB61_532
+; CHECK-RV32-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 501
+; CHECK-RV32-NEXT:    li a3, 500
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1014
+; CHECK-RV32-NEXT:    j .LBB61_533
+; CHECK-RV32-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 502
+; CHECK-RV32-NEXT:    li a3, 501
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1015
+; CHECK-RV32-NEXT:    j .LBB61_534
+; CHECK-RV32-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 503
+; CHECK-RV32-NEXT:    li a3, 502
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1016
+; CHECK-RV32-NEXT:    j .LBB61_535
+; CHECK-RV32-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 504
+; CHECK-RV32-NEXT:    li a3, 503
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1017
+; CHECK-RV32-NEXT:    j .LBB61_536
+; CHECK-RV32-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 505
+; CHECK-RV32-NEXT:    li a3, 504
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1018
+; CHECK-RV32-NEXT:    j .LBB61_537
+; CHECK-RV32-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 506
+; CHECK-RV32-NEXT:    li a3, 505
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1019
+; CHECK-RV32-NEXT:    j .LBB61_538
+; CHECK-RV32-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 507
+; CHECK-RV32-NEXT:    li a3, 506
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1020
+; CHECK-RV32-NEXT:    j .LBB61_539
+; CHECK-RV32-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 508
+; CHECK-RV32-NEXT:    li a3, 507
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1021
+; CHECK-RV32-NEXT:    j .LBB61_540
+; CHECK-RV32-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 509
+; CHECK-RV32-NEXT:    li a3, 508
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 2
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1022
+; CHECK-RV32-NEXT:    j .LBB61_541
+; CHECK-RV32-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 510
+; CHECK-RV32-NEXT:    li a3, 509
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 1
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1023
+; CHECK-RV32-NEXT:    j .LBB61_542
+; CHECK-RV32-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 511
+; CHECK-RV32-NEXT:    li a3, 510
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_1024
+; CHECK-RV32-NEXT:    j .LBB61_543
+; CHECK-RV32-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-RV32-NEXT:    lbu a0, 0(a0)
+; CHECK-RV32-NEXT:    li a1, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a0
+; CHECK-RV32-NEXT:    li a0, 511
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-RV32-NEXT:    ret
+;
+; CHECK-RV64-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v0
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_1
+; CHECK-RV64-NEXT:    j .LBB61_527
+; CHECK-RV64-NEXT:  .LBB61_1: # %else
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_2
+; CHECK-RV64-NEXT:    j .LBB61_528
+; CHECK-RV64-NEXT:  .LBB61_2: # %else2
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_3
+; CHECK-RV64-NEXT:    j .LBB61_529
+; CHECK-RV64-NEXT:  .LBB61_3: # %else6
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_4
+; CHECK-RV64-NEXT:    j .LBB61_530
+; CHECK-RV64-NEXT:  .LBB61_4: # %else10
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_5
+; CHECK-RV64-NEXT:    j .LBB61_531
+; CHECK-RV64-NEXT:  .LBB61_5: # %else14
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_6
+; CHECK-RV64-NEXT:    j .LBB61_532
+; CHECK-RV64-NEXT:  .LBB61_6: # %else18
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_7
+; CHECK-RV64-NEXT:    j .LBB61_533
+; CHECK-RV64-NEXT:  .LBB61_7: # %else22
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_8
+; CHECK-RV64-NEXT:    j .LBB61_534
+; CHECK-RV64-NEXT:  .LBB61_8: # %else26
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_9
+; CHECK-RV64-NEXT:    j .LBB61_535
+; CHECK-RV64-NEXT:  .LBB61_9: # %else30
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_10
+; CHECK-RV64-NEXT:    j .LBB61_536
+; CHECK-RV64-NEXT:  .LBB61_10: # %else34
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_11
+; CHECK-RV64-NEXT:    j .LBB61_537
+; CHECK-RV64-NEXT:  .LBB61_11: # %else38
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_12
+; CHECK-RV64-NEXT:    j .LBB61_538
+; CHECK-RV64-NEXT:  .LBB61_12: # %else42
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_13
+; CHECK-RV64-NEXT:    j .LBB61_539
+; CHECK-RV64-NEXT:  .LBB61_13: # %else46
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_14
+; CHECK-RV64-NEXT:    j .LBB61_540
+; CHECK-RV64-NEXT:  .LBB61_14: # %else50
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_15
+; CHECK-RV64-NEXT:    j .LBB61_541
+; CHECK-RV64-NEXT:  .LBB61_15: # %else54
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_16
+; CHECK-RV64-NEXT:    j .LBB61_542
+; CHECK-RV64-NEXT:  .LBB61_16: # %else58
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_17
+; CHECK-RV64-NEXT:    j .LBB61_543
+; CHECK-RV64-NEXT:  .LBB61_17: # %else62
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_18
+; CHECK-RV64-NEXT:    j .LBB61_544
+; CHECK-RV64-NEXT:  .LBB61_18: # %else66
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_19
+; CHECK-RV64-NEXT:    j .LBB61_545
+; CHECK-RV64-NEXT:  .LBB61_19: # %else70
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_20
+; CHECK-RV64-NEXT:    j .LBB61_546
+; CHECK-RV64-NEXT:  .LBB61_20: # %else74
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_21
+; CHECK-RV64-NEXT:    j .LBB61_547
+; CHECK-RV64-NEXT:  .LBB61_21: # %else78
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_22
+; CHECK-RV64-NEXT:    j .LBB61_548
+; CHECK-RV64-NEXT:  .LBB61_22: # %else82
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_23
+; CHECK-RV64-NEXT:    j .LBB61_549
+; CHECK-RV64-NEXT:  .LBB61_23: # %else86
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_24
+; CHECK-RV64-NEXT:    j .LBB61_550
+; CHECK-RV64-NEXT:  .LBB61_24: # %else90
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_25
+; CHECK-RV64-NEXT:    j .LBB61_551
+; CHECK-RV64-NEXT:  .LBB61_25: # %else94
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_26
+; CHECK-RV64-NEXT:    j .LBB61_552
+; CHECK-RV64-NEXT:  .LBB61_26: # %else98
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_27
+; CHECK-RV64-NEXT:    j .LBB61_553
+; CHECK-RV64-NEXT:  .LBB61_27: # %else102
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_28
+; CHECK-RV64-NEXT:    j .LBB61_554
+; CHECK-RV64-NEXT:  .LBB61_28: # %else106
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_29
+; CHECK-RV64-NEXT:    j .LBB61_555
+; CHECK-RV64-NEXT:  .LBB61_29: # %else110
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_30
+; CHECK-RV64-NEXT:    j .LBB61_556
+; CHECK-RV64-NEXT:  .LBB61_30: # %else114
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_31
+; CHECK-RV64-NEXT:    j .LBB61_557
+; CHECK-RV64-NEXT:  .LBB61_31: # %else118
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_32
+; CHECK-RV64-NEXT:    j .LBB61_558
+; CHECK-RV64-NEXT:  .LBB61_32: # %else122
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_33
+; CHECK-RV64-NEXT:    j .LBB61_559
+; CHECK-RV64-NEXT:  .LBB61_33: # %else126
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_34
+; CHECK-RV64-NEXT:    j .LBB61_560
+; CHECK-RV64-NEXT:  .LBB61_34: # %else130
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_35
+; CHECK-RV64-NEXT:    j .LBB61_561
+; CHECK-RV64-NEXT:  .LBB61_35: # %else134
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_36
+; CHECK-RV64-NEXT:    j .LBB61_562
+; CHECK-RV64-NEXT:  .LBB61_36: # %else138
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_37
+; CHECK-RV64-NEXT:    j .LBB61_563
+; CHECK-RV64-NEXT:  .LBB61_37: # %else142
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_38
+; CHECK-RV64-NEXT:    j .LBB61_564
+; CHECK-RV64-NEXT:  .LBB61_38: # %else146
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_39
+; CHECK-RV64-NEXT:    j .LBB61_565
+; CHECK-RV64-NEXT:  .LBB61_39: # %else150
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_40
+; CHECK-RV64-NEXT:    j .LBB61_566
+; CHECK-RV64-NEXT:  .LBB61_40: # %else154
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_41
+; CHECK-RV64-NEXT:    j .LBB61_567
+; CHECK-RV64-NEXT:  .LBB61_41: # %else158
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_42
+; CHECK-RV64-NEXT:    j .LBB61_568
+; CHECK-RV64-NEXT:  .LBB61_42: # %else162
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_43
+; CHECK-RV64-NEXT:    j .LBB61_569
+; CHECK-RV64-NEXT:  .LBB61_43: # %else166
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_44
+; CHECK-RV64-NEXT:    j .LBB61_570
+; CHECK-RV64-NEXT:  .LBB61_44: # %else170
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_45
+; CHECK-RV64-NEXT:    j .LBB61_571
+; CHECK-RV64-NEXT:  .LBB61_45: # %else174
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_46
+; CHECK-RV64-NEXT:    j .LBB61_572
+; CHECK-RV64-NEXT:  .LBB61_46: # %else178
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_47
+; CHECK-RV64-NEXT:    j .LBB61_573
+; CHECK-RV64-NEXT:  .LBB61_47: # %else182
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_48
+; CHECK-RV64-NEXT:    j .LBB61_574
+; CHECK-RV64-NEXT:  .LBB61_48: # %else186
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_49
+; CHECK-RV64-NEXT:    j .LBB61_575
+; CHECK-RV64-NEXT:  .LBB61_49: # %else190
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_50
+; CHECK-RV64-NEXT:    j .LBB61_576
+; CHECK-RV64-NEXT:  .LBB61_50: # %else194
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_51
+; CHECK-RV64-NEXT:    j .LBB61_577
+; CHECK-RV64-NEXT:  .LBB61_51: # %else198
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_52
+; CHECK-RV64-NEXT:    j .LBB61_578
+; CHECK-RV64-NEXT:  .LBB61_52: # %else202
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_53
+; CHECK-RV64-NEXT:    j .LBB61_579
+; CHECK-RV64-NEXT:  .LBB61_53: # %else206
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_54
+; CHECK-RV64-NEXT:    j .LBB61_580
+; CHECK-RV64-NEXT:  .LBB61_54: # %else210
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_55
+; CHECK-RV64-NEXT:    j .LBB61_581
+; CHECK-RV64-NEXT:  .LBB61_55: # %else214
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_56
+; CHECK-RV64-NEXT:    j .LBB61_582
+; CHECK-RV64-NEXT:  .LBB61_56: # %else218
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_57
+; CHECK-RV64-NEXT:    j .LBB61_583
+; CHECK-RV64-NEXT:  .LBB61_57: # %else222
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_58
+; CHECK-RV64-NEXT:    j .LBB61_584
+; CHECK-RV64-NEXT:  .LBB61_58: # %else226
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_59
+; CHECK-RV64-NEXT:    j .LBB61_585
+; CHECK-RV64-NEXT:  .LBB61_59: # %else230
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_60
+; CHECK-RV64-NEXT:    j .LBB61_586
+; CHECK-RV64-NEXT:  .LBB61_60: # %else234
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_61
+; CHECK-RV64-NEXT:    j .LBB61_587
+; CHECK-RV64-NEXT:  .LBB61_61: # %else238
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_63
+; CHECK-RV64-NEXT:  .LBB61_62: # %cond.load241
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 62
+; CHECK-RV64-NEXT:    li a3, 61
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:  .LBB61_63: # %else242
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_65
+; CHECK-RV64-NEXT:  # %bb.64: # %cond.load245
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v17, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 63
+; CHECK-RV64-NEXT:    li a3, 62
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v17, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_65: # %else246
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_66
+; CHECK-RV64-NEXT:    j .LBB61_588
+; CHECK-RV64-NEXT:  .LBB61_66: # %else250
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_67
+; CHECK-RV64-NEXT:    j .LBB61_589
+; CHECK-RV64-NEXT:  .LBB61_67: # %else254
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_68
+; CHECK-RV64-NEXT:    j .LBB61_590
+; CHECK-RV64-NEXT:  .LBB61_68: # %else258
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_69
+; CHECK-RV64-NEXT:    j .LBB61_591
+; CHECK-RV64-NEXT:  .LBB61_69: # %else262
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_70
+; CHECK-RV64-NEXT:    j .LBB61_592
+; CHECK-RV64-NEXT:  .LBB61_70: # %else266
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_71
+; CHECK-RV64-NEXT:    j .LBB61_593
+; CHECK-RV64-NEXT:  .LBB61_71: # %else270
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_72
+; CHECK-RV64-NEXT:    j .LBB61_594
+; CHECK-RV64-NEXT:  .LBB61_72: # %else274
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_73
+; CHECK-RV64-NEXT:    j .LBB61_595
+; CHECK-RV64-NEXT:  .LBB61_73: # %else278
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_74
+; CHECK-RV64-NEXT:    j .LBB61_596
+; CHECK-RV64-NEXT:  .LBB61_74: # %else282
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_75
+; CHECK-RV64-NEXT:    j .LBB61_597
+; CHECK-RV64-NEXT:  .LBB61_75: # %else286
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_76
+; CHECK-RV64-NEXT:    j .LBB61_598
+; CHECK-RV64-NEXT:  .LBB61_76: # %else290
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_77
+; CHECK-RV64-NEXT:    j .LBB61_599
+; CHECK-RV64-NEXT:  .LBB61_77: # %else294
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_78
+; CHECK-RV64-NEXT:    j .LBB61_600
+; CHECK-RV64-NEXT:  .LBB61_78: # %else298
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_79
+; CHECK-RV64-NEXT:    j .LBB61_601
+; CHECK-RV64-NEXT:  .LBB61_79: # %else302
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_80
+; CHECK-RV64-NEXT:    j .LBB61_602
+; CHECK-RV64-NEXT:  .LBB61_80: # %else306
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_81
+; CHECK-RV64-NEXT:    j .LBB61_603
+; CHECK-RV64-NEXT:  .LBB61_81: # %else310
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_82
+; CHECK-RV64-NEXT:    j .LBB61_604
+; CHECK-RV64-NEXT:  .LBB61_82: # %else314
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_83
+; CHECK-RV64-NEXT:    j .LBB61_605
+; CHECK-RV64-NEXT:  .LBB61_83: # %else318
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_84
+; CHECK-RV64-NEXT:    j .LBB61_606
+; CHECK-RV64-NEXT:  .LBB61_84: # %else322
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_85
+; CHECK-RV64-NEXT:    j .LBB61_607
+; CHECK-RV64-NEXT:  .LBB61_85: # %else326
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_86
+; CHECK-RV64-NEXT:    j .LBB61_608
+; CHECK-RV64-NEXT:  .LBB61_86: # %else330
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_87
+; CHECK-RV64-NEXT:    j .LBB61_609
+; CHECK-RV64-NEXT:  .LBB61_87: # %else334
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_88
+; CHECK-RV64-NEXT:    j .LBB61_610
+; CHECK-RV64-NEXT:  .LBB61_88: # %else338
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_89
+; CHECK-RV64-NEXT:    j .LBB61_611
+; CHECK-RV64-NEXT:  .LBB61_89: # %else342
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_90
+; CHECK-RV64-NEXT:    j .LBB61_612
+; CHECK-RV64-NEXT:  .LBB61_90: # %else346
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_91
+; CHECK-RV64-NEXT:    j .LBB61_613
+; CHECK-RV64-NEXT:  .LBB61_91: # %else350
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_92
+; CHECK-RV64-NEXT:    j .LBB61_614
+; CHECK-RV64-NEXT:  .LBB61_92: # %else354
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_93
+; CHECK-RV64-NEXT:    j .LBB61_615
+; CHECK-RV64-NEXT:  .LBB61_93: # %else358
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_94
+; CHECK-RV64-NEXT:    j .LBB61_616
+; CHECK-RV64-NEXT:  .LBB61_94: # %else362
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_95
+; CHECK-RV64-NEXT:    j .LBB61_617
+; CHECK-RV64-NEXT:  .LBB61_95: # %else366
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_96
+; CHECK-RV64-NEXT:    j .LBB61_618
+; CHECK-RV64-NEXT:  .LBB61_96: # %else370
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_97
+; CHECK-RV64-NEXT:    j .LBB61_619
+; CHECK-RV64-NEXT:  .LBB61_97: # %else374
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_98
+; CHECK-RV64-NEXT:    j .LBB61_620
+; CHECK-RV64-NEXT:  .LBB61_98: # %else378
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_99
+; CHECK-RV64-NEXT:    j .LBB61_621
+; CHECK-RV64-NEXT:  .LBB61_99: # %else382
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_100
+; CHECK-RV64-NEXT:    j .LBB61_622
+; CHECK-RV64-NEXT:  .LBB61_100: # %else386
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_101
+; CHECK-RV64-NEXT:    j .LBB61_623
+; CHECK-RV64-NEXT:  .LBB61_101: # %else390
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_102
+; CHECK-RV64-NEXT:    j .LBB61_624
+; CHECK-RV64-NEXT:  .LBB61_102: # %else394
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_103
+; CHECK-RV64-NEXT:    j .LBB61_625
+; CHECK-RV64-NEXT:  .LBB61_103: # %else398
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_104
+; CHECK-RV64-NEXT:    j .LBB61_626
+; CHECK-RV64-NEXT:  .LBB61_104: # %else402
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_105
+; CHECK-RV64-NEXT:    j .LBB61_627
+; CHECK-RV64-NEXT:  .LBB61_105: # %else406
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_106
+; CHECK-RV64-NEXT:    j .LBB61_628
+; CHECK-RV64-NEXT:  .LBB61_106: # %else410
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_107
+; CHECK-RV64-NEXT:    j .LBB61_629
+; CHECK-RV64-NEXT:  .LBB61_107: # %else414
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_108
+; CHECK-RV64-NEXT:    j .LBB61_630
+; CHECK-RV64-NEXT:  .LBB61_108: # %else418
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_109
+; CHECK-RV64-NEXT:    j .LBB61_631
+; CHECK-RV64-NEXT:  .LBB61_109: # %else422
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_110
+; CHECK-RV64-NEXT:    j .LBB61_632
+; CHECK-RV64-NEXT:  .LBB61_110: # %else426
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_111
+; CHECK-RV64-NEXT:    j .LBB61_633
+; CHECK-RV64-NEXT:  .LBB61_111: # %else430
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_112
+; CHECK-RV64-NEXT:    j .LBB61_634
+; CHECK-RV64-NEXT:  .LBB61_112: # %else434
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_113
+; CHECK-RV64-NEXT:    j .LBB61_635
+; CHECK-RV64-NEXT:  .LBB61_113: # %else438
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_114
+; CHECK-RV64-NEXT:    j .LBB61_636
+; CHECK-RV64-NEXT:  .LBB61_114: # %else442
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_115
+; CHECK-RV64-NEXT:    j .LBB61_637
+; CHECK-RV64-NEXT:  .LBB61_115: # %else446
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_116
+; CHECK-RV64-NEXT:    j .LBB61_638
+; CHECK-RV64-NEXT:  .LBB61_116: # %else450
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_117
+; CHECK-RV64-NEXT:    j .LBB61_639
+; CHECK-RV64-NEXT:  .LBB61_117: # %else454
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_118
+; CHECK-RV64-NEXT:    j .LBB61_640
+; CHECK-RV64-NEXT:  .LBB61_118: # %else458
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_119
+; CHECK-RV64-NEXT:    j .LBB61_641
+; CHECK-RV64-NEXT:  .LBB61_119: # %else462
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_120
+; CHECK-RV64-NEXT:    j .LBB61_642
+; CHECK-RV64-NEXT:  .LBB61_120: # %else466
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_121
+; CHECK-RV64-NEXT:    j .LBB61_643
+; CHECK-RV64-NEXT:  .LBB61_121: # %else470
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_122
+; CHECK-RV64-NEXT:    j .LBB61_644
+; CHECK-RV64-NEXT:  .LBB61_122: # %else474
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_123
+; CHECK-RV64-NEXT:    j .LBB61_645
+; CHECK-RV64-NEXT:  .LBB61_123: # %else478
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_124
+; CHECK-RV64-NEXT:    j .LBB61_646
+; CHECK-RV64-NEXT:  .LBB61_124: # %else482
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_125
+; CHECK-RV64-NEXT:    j .LBB61_647
+; CHECK-RV64-NEXT:  .LBB61_125: # %else486
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_126
+; CHECK-RV64-NEXT:    j .LBB61_648
+; CHECK-RV64-NEXT:  .LBB61_126: # %else490
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_127
+; CHECK-RV64-NEXT:    j .LBB61_649
+; CHECK-RV64-NEXT:  .LBB61_127: # %else494
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_129
+; CHECK-RV64-NEXT:  .LBB61_128: # %cond.load497
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 126
+; CHECK-RV64-NEXT:    li a3, 125
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:  .LBB61_129: # %else498
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_131
+; CHECK-RV64-NEXT:  # %bb.130: # %cond.load501
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v18, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 127
+; CHECK-RV64-NEXT:    li a3, 126
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v18, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_131: # %else502
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_132
+; CHECK-RV64-NEXT:    j .LBB61_650
+; CHECK-RV64-NEXT:  .LBB61_132: # %else506
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_133
+; CHECK-RV64-NEXT:    j .LBB61_651
+; CHECK-RV64-NEXT:  .LBB61_133: # %else510
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_134
+; CHECK-RV64-NEXT:    j .LBB61_652
+; CHECK-RV64-NEXT:  .LBB61_134: # %else514
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_135
+; CHECK-RV64-NEXT:    j .LBB61_653
+; CHECK-RV64-NEXT:  .LBB61_135: # %else518
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_136
+; CHECK-RV64-NEXT:    j .LBB61_654
+; CHECK-RV64-NEXT:  .LBB61_136: # %else522
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_137
+; CHECK-RV64-NEXT:    j .LBB61_655
+; CHECK-RV64-NEXT:  .LBB61_137: # %else526
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_138
+; CHECK-RV64-NEXT:    j .LBB61_656
+; CHECK-RV64-NEXT:  .LBB61_138: # %else530
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_139
+; CHECK-RV64-NEXT:    j .LBB61_657
+; CHECK-RV64-NEXT:  .LBB61_139: # %else534
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_140
+; CHECK-RV64-NEXT:    j .LBB61_658
+; CHECK-RV64-NEXT:  .LBB61_140: # %else538
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_141
+; CHECK-RV64-NEXT:    j .LBB61_659
+; CHECK-RV64-NEXT:  .LBB61_141: # %else542
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_142
+; CHECK-RV64-NEXT:    j .LBB61_660
+; CHECK-RV64-NEXT:  .LBB61_142: # %else546
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_143
+; CHECK-RV64-NEXT:    j .LBB61_661
+; CHECK-RV64-NEXT:  .LBB61_143: # %else550
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_144
+; CHECK-RV64-NEXT:    j .LBB61_662
+; CHECK-RV64-NEXT:  .LBB61_144: # %else554
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_145
+; CHECK-RV64-NEXT:    j .LBB61_663
+; CHECK-RV64-NEXT:  .LBB61_145: # %else558
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_146
+; CHECK-RV64-NEXT:    j .LBB61_664
+; CHECK-RV64-NEXT:  .LBB61_146: # %else562
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_147
+; CHECK-RV64-NEXT:    j .LBB61_665
+; CHECK-RV64-NEXT:  .LBB61_147: # %else566
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_148
+; CHECK-RV64-NEXT:    j .LBB61_666
+; CHECK-RV64-NEXT:  .LBB61_148: # %else570
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_149
+; CHECK-RV64-NEXT:    j .LBB61_667
+; CHECK-RV64-NEXT:  .LBB61_149: # %else574
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_150
+; CHECK-RV64-NEXT:    j .LBB61_668
+; CHECK-RV64-NEXT:  .LBB61_150: # %else578
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_151
+; CHECK-RV64-NEXT:    j .LBB61_669
+; CHECK-RV64-NEXT:  .LBB61_151: # %else582
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_152
+; CHECK-RV64-NEXT:    j .LBB61_670
+; CHECK-RV64-NEXT:  .LBB61_152: # %else586
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_153
+; CHECK-RV64-NEXT:    j .LBB61_671
+; CHECK-RV64-NEXT:  .LBB61_153: # %else590
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_154
+; CHECK-RV64-NEXT:    j .LBB61_672
+; CHECK-RV64-NEXT:  .LBB61_154: # %else594
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_155
+; CHECK-RV64-NEXT:    j .LBB61_673
+; CHECK-RV64-NEXT:  .LBB61_155: # %else598
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_156
+; CHECK-RV64-NEXT:    j .LBB61_674
+; CHECK-RV64-NEXT:  .LBB61_156: # %else602
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_157
+; CHECK-RV64-NEXT:    j .LBB61_675
+; CHECK-RV64-NEXT:  .LBB61_157: # %else606
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_158
+; CHECK-RV64-NEXT:    j .LBB61_676
+; CHECK-RV64-NEXT:  .LBB61_158: # %else610
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_159
+; CHECK-RV64-NEXT:    j .LBB61_677
+; CHECK-RV64-NEXT:  .LBB61_159: # %else614
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_160
+; CHECK-RV64-NEXT:    j .LBB61_678
+; CHECK-RV64-NEXT:  .LBB61_160: # %else618
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_161
+; CHECK-RV64-NEXT:    j .LBB61_679
+; CHECK-RV64-NEXT:  .LBB61_161: # %else622
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_162
+; CHECK-RV64-NEXT:    j .LBB61_680
+; CHECK-RV64-NEXT:  .LBB61_162: # %else626
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_163
+; CHECK-RV64-NEXT:    j .LBB61_681
+; CHECK-RV64-NEXT:  .LBB61_163: # %else630
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_164
+; CHECK-RV64-NEXT:    j .LBB61_682
+; CHECK-RV64-NEXT:  .LBB61_164: # %else634
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_165
+; CHECK-RV64-NEXT:    j .LBB61_683
+; CHECK-RV64-NEXT:  .LBB61_165: # %else638
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_166
+; CHECK-RV64-NEXT:    j .LBB61_684
+; CHECK-RV64-NEXT:  .LBB61_166: # %else642
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_167
+; CHECK-RV64-NEXT:    j .LBB61_685
+; CHECK-RV64-NEXT:  .LBB61_167: # %else646
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_168
+; CHECK-RV64-NEXT:    j .LBB61_686
+; CHECK-RV64-NEXT:  .LBB61_168: # %else650
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_169
+; CHECK-RV64-NEXT:    j .LBB61_687
+; CHECK-RV64-NEXT:  .LBB61_169: # %else654
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_170
+; CHECK-RV64-NEXT:    j .LBB61_688
+; CHECK-RV64-NEXT:  .LBB61_170: # %else658
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_171
+; CHECK-RV64-NEXT:    j .LBB61_689
+; CHECK-RV64-NEXT:  .LBB61_171: # %else662
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_172
+; CHECK-RV64-NEXT:    j .LBB61_690
+; CHECK-RV64-NEXT:  .LBB61_172: # %else666
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_173
+; CHECK-RV64-NEXT:    j .LBB61_691
+; CHECK-RV64-NEXT:  .LBB61_173: # %else670
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_174
+; CHECK-RV64-NEXT:    j .LBB61_692
+; CHECK-RV64-NEXT:  .LBB61_174: # %else674
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_175
+; CHECK-RV64-NEXT:    j .LBB61_693
+; CHECK-RV64-NEXT:  .LBB61_175: # %else678
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_176
+; CHECK-RV64-NEXT:    j .LBB61_694
+; CHECK-RV64-NEXT:  .LBB61_176: # %else682
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_177
+; CHECK-RV64-NEXT:    j .LBB61_695
+; CHECK-RV64-NEXT:  .LBB61_177: # %else686
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_178
+; CHECK-RV64-NEXT:    j .LBB61_696
+; CHECK-RV64-NEXT:  .LBB61_178: # %else690
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_179
+; CHECK-RV64-NEXT:    j .LBB61_697
+; CHECK-RV64-NEXT:  .LBB61_179: # %else694
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_180
+; CHECK-RV64-NEXT:    j .LBB61_698
+; CHECK-RV64-NEXT:  .LBB61_180: # %else698
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_181
+; CHECK-RV64-NEXT:    j .LBB61_699
+; CHECK-RV64-NEXT:  .LBB61_181: # %else702
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_182
+; CHECK-RV64-NEXT:    j .LBB61_700
+; CHECK-RV64-NEXT:  .LBB61_182: # %else706
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_183
+; CHECK-RV64-NEXT:    j .LBB61_701
+; CHECK-RV64-NEXT:  .LBB61_183: # %else710
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_184
+; CHECK-RV64-NEXT:    j .LBB61_702
+; CHECK-RV64-NEXT:  .LBB61_184: # %else714
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_185
+; CHECK-RV64-NEXT:    j .LBB61_703
+; CHECK-RV64-NEXT:  .LBB61_185: # %else718
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_186
+; CHECK-RV64-NEXT:    j .LBB61_704
+; CHECK-RV64-NEXT:  .LBB61_186: # %else722
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_187
+; CHECK-RV64-NEXT:    j .LBB61_705
+; CHECK-RV64-NEXT:  .LBB61_187: # %else726
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_188
+; CHECK-RV64-NEXT:    j .LBB61_706
+; CHECK-RV64-NEXT:  .LBB61_188: # %else730
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_189
+; CHECK-RV64-NEXT:    j .LBB61_707
+; CHECK-RV64-NEXT:  .LBB61_189: # %else734
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_190
+; CHECK-RV64-NEXT:    j .LBB61_708
+; CHECK-RV64-NEXT:  .LBB61_190: # %else738
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_191
+; CHECK-RV64-NEXT:    j .LBB61_709
+; CHECK-RV64-NEXT:  .LBB61_191: # %else742
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_192
+; CHECK-RV64-NEXT:    j .LBB61_710
+; CHECK-RV64-NEXT:  .LBB61_192: # %else746
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_193
+; CHECK-RV64-NEXT:    j .LBB61_711
+; CHECK-RV64-NEXT:  .LBB61_193: # %else750
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_195
+; CHECK-RV64-NEXT:  .LBB61_194: # %cond.load753
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 190
+; CHECK-RV64-NEXT:    li a3, 189
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_195: # %else754
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_197
+; CHECK-RV64-NEXT:  # %bb.196: # %cond.load757
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v20, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 191
+; CHECK-RV64-NEXT:    li a3, 190
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_197: # %else758
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_198
+; CHECK-RV64-NEXT:    j .LBB61_712
+; CHECK-RV64-NEXT:  .LBB61_198: # %else762
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_199
+; CHECK-RV64-NEXT:    j .LBB61_713
+; CHECK-RV64-NEXT:  .LBB61_199: # %else766
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_200
+; CHECK-RV64-NEXT:    j .LBB61_714
+; CHECK-RV64-NEXT:  .LBB61_200: # %else770
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_201
+; CHECK-RV64-NEXT:    j .LBB61_715
+; CHECK-RV64-NEXT:  .LBB61_201: # %else774
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_202
+; CHECK-RV64-NEXT:    j .LBB61_716
+; CHECK-RV64-NEXT:  .LBB61_202: # %else778
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_203
+; CHECK-RV64-NEXT:    j .LBB61_717
+; CHECK-RV64-NEXT:  .LBB61_203: # %else782
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_204
+; CHECK-RV64-NEXT:    j .LBB61_718
+; CHECK-RV64-NEXT:  .LBB61_204: # %else786
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_205
+; CHECK-RV64-NEXT:    j .LBB61_719
+; CHECK-RV64-NEXT:  .LBB61_205: # %else790
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_206
+; CHECK-RV64-NEXT:    j .LBB61_720
+; CHECK-RV64-NEXT:  .LBB61_206: # %else794
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_207
+; CHECK-RV64-NEXT:    j .LBB61_721
+; CHECK-RV64-NEXT:  .LBB61_207: # %else798
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_208
+; CHECK-RV64-NEXT:    j .LBB61_722
+; CHECK-RV64-NEXT:  .LBB61_208: # %else802
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_209
+; CHECK-RV64-NEXT:    j .LBB61_723
+; CHECK-RV64-NEXT:  .LBB61_209: # %else806
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_210
+; CHECK-RV64-NEXT:    j .LBB61_724
+; CHECK-RV64-NEXT:  .LBB61_210: # %else810
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_211
+; CHECK-RV64-NEXT:    j .LBB61_725
+; CHECK-RV64-NEXT:  .LBB61_211: # %else814
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_212
+; CHECK-RV64-NEXT:    j .LBB61_726
+; CHECK-RV64-NEXT:  .LBB61_212: # %else818
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_213
+; CHECK-RV64-NEXT:    j .LBB61_727
+; CHECK-RV64-NEXT:  .LBB61_213: # %else822
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_214
+; CHECK-RV64-NEXT:    j .LBB61_728
+; CHECK-RV64-NEXT:  .LBB61_214: # %else826
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_215
+; CHECK-RV64-NEXT:    j .LBB61_729
+; CHECK-RV64-NEXT:  .LBB61_215: # %else830
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_216
+; CHECK-RV64-NEXT:    j .LBB61_730
+; CHECK-RV64-NEXT:  .LBB61_216: # %else834
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_217
+; CHECK-RV64-NEXT:    j .LBB61_731
+; CHECK-RV64-NEXT:  .LBB61_217: # %else838
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_218
+; CHECK-RV64-NEXT:    j .LBB61_732
+; CHECK-RV64-NEXT:  .LBB61_218: # %else842
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_219
+; CHECK-RV64-NEXT:    j .LBB61_733
+; CHECK-RV64-NEXT:  .LBB61_219: # %else846
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_220
+; CHECK-RV64-NEXT:    j .LBB61_734
+; CHECK-RV64-NEXT:  .LBB61_220: # %else850
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_221
+; CHECK-RV64-NEXT:    j .LBB61_735
+; CHECK-RV64-NEXT:  .LBB61_221: # %else854
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_222
+; CHECK-RV64-NEXT:    j .LBB61_736
+; CHECK-RV64-NEXT:  .LBB61_222: # %else858
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_223
+; CHECK-RV64-NEXT:    j .LBB61_737
+; CHECK-RV64-NEXT:  .LBB61_223: # %else862
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_224
+; CHECK-RV64-NEXT:    j .LBB61_738
+; CHECK-RV64-NEXT:  .LBB61_224: # %else866
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_225
+; CHECK-RV64-NEXT:    j .LBB61_739
+; CHECK-RV64-NEXT:  .LBB61_225: # %else870
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_226
+; CHECK-RV64-NEXT:    j .LBB61_740
+; CHECK-RV64-NEXT:  .LBB61_226: # %else874
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_227
+; CHECK-RV64-NEXT:    j .LBB61_741
+; CHECK-RV64-NEXT:  .LBB61_227: # %else878
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_228
+; CHECK-RV64-NEXT:    j .LBB61_742
+; CHECK-RV64-NEXT:  .LBB61_228: # %else882
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_229
+; CHECK-RV64-NEXT:    j .LBB61_743
+; CHECK-RV64-NEXT:  .LBB61_229: # %else886
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_230
+; CHECK-RV64-NEXT:    j .LBB61_744
+; CHECK-RV64-NEXT:  .LBB61_230: # %else890
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_231
+; CHECK-RV64-NEXT:    j .LBB61_745
+; CHECK-RV64-NEXT:  .LBB61_231: # %else894
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_232
+; CHECK-RV64-NEXT:    j .LBB61_746
+; CHECK-RV64-NEXT:  .LBB61_232: # %else898
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_233
+; CHECK-RV64-NEXT:    j .LBB61_747
+; CHECK-RV64-NEXT:  .LBB61_233: # %else902
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_234
+; CHECK-RV64-NEXT:    j .LBB61_748
+; CHECK-RV64-NEXT:  .LBB61_234: # %else906
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_235
+; CHECK-RV64-NEXT:    j .LBB61_749
+; CHECK-RV64-NEXT:  .LBB61_235: # %else910
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_236
+; CHECK-RV64-NEXT:    j .LBB61_750
+; CHECK-RV64-NEXT:  .LBB61_236: # %else914
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_237
+; CHECK-RV64-NEXT:    j .LBB61_751
+; CHECK-RV64-NEXT:  .LBB61_237: # %else918
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_238
+; CHECK-RV64-NEXT:    j .LBB61_752
+; CHECK-RV64-NEXT:  .LBB61_238: # %else922
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_239
+; CHECK-RV64-NEXT:    j .LBB61_753
+; CHECK-RV64-NEXT:  .LBB61_239: # %else926
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_240
+; CHECK-RV64-NEXT:    j .LBB61_754
+; CHECK-RV64-NEXT:  .LBB61_240: # %else930
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_241
+; CHECK-RV64-NEXT:    j .LBB61_755
+; CHECK-RV64-NEXT:  .LBB61_241: # %else934
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_242
+; CHECK-RV64-NEXT:    j .LBB61_756
+; CHECK-RV64-NEXT:  .LBB61_242: # %else938
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_243
+; CHECK-RV64-NEXT:    j .LBB61_757
+; CHECK-RV64-NEXT:  .LBB61_243: # %else942
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_244
+; CHECK-RV64-NEXT:    j .LBB61_758
+; CHECK-RV64-NEXT:  .LBB61_244: # %else946
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_245
+; CHECK-RV64-NEXT:    j .LBB61_759
+; CHECK-RV64-NEXT:  .LBB61_245: # %else950
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_246
+; CHECK-RV64-NEXT:    j .LBB61_760
+; CHECK-RV64-NEXT:  .LBB61_246: # %else954
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_247
+; CHECK-RV64-NEXT:    j .LBB61_761
+; CHECK-RV64-NEXT:  .LBB61_247: # %else958
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_248
+; CHECK-RV64-NEXT:    j .LBB61_762
+; CHECK-RV64-NEXT:  .LBB61_248: # %else962
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_249
+; CHECK-RV64-NEXT:    j .LBB61_763
+; CHECK-RV64-NEXT:  .LBB61_249: # %else966
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_250
+; CHECK-RV64-NEXT:    j .LBB61_764
+; CHECK-RV64-NEXT:  .LBB61_250: # %else970
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_251
+; CHECK-RV64-NEXT:    j .LBB61_765
+; CHECK-RV64-NEXT:  .LBB61_251: # %else974
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_252
+; CHECK-RV64-NEXT:    j .LBB61_766
+; CHECK-RV64-NEXT:  .LBB61_252: # %else978
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_253
+; CHECK-RV64-NEXT:    j .LBB61_767
+; CHECK-RV64-NEXT:  .LBB61_253: # %else982
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_254
+; CHECK-RV64-NEXT:    j .LBB61_768
+; CHECK-RV64-NEXT:  .LBB61_254: # %else986
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_255
+; CHECK-RV64-NEXT:    j .LBB61_769
+; CHECK-RV64-NEXT:  .LBB61_255: # %else990
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_256
+; CHECK-RV64-NEXT:    j .LBB61_770
+; CHECK-RV64-NEXT:  .LBB61_256: # %else994
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_257
+; CHECK-RV64-NEXT:    j .LBB61_771
+; CHECK-RV64-NEXT:  .LBB61_257: # %else998
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_258
+; CHECK-RV64-NEXT:    j .LBB61_772
+; CHECK-RV64-NEXT:  .LBB61_258: # %else1002
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_259
+; CHECK-RV64-NEXT:    j .LBB61_773
+; CHECK-RV64-NEXT:  .LBB61_259: # %else1006
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_261
+; CHECK-RV64-NEXT:  .LBB61_260: # %cond.load1009
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 254
+; CHECK-RV64-NEXT:    li a3, 253
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_261: # %else1010
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_263
+; CHECK-RV64-NEXT:  # %bb.262: # %cond.load1013
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v20, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 255
+; CHECK-RV64-NEXT:    li a3, 254
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_263: # %else1014
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_264
+; CHECK-RV64-NEXT:    j .LBB61_774
+; CHECK-RV64-NEXT:  .LBB61_264: # %else1018
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_265
+; CHECK-RV64-NEXT:    j .LBB61_775
+; CHECK-RV64-NEXT:  .LBB61_265: # %else1022
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_266
+; CHECK-RV64-NEXT:    j .LBB61_776
+; CHECK-RV64-NEXT:  .LBB61_266: # %else1026
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_267
+; CHECK-RV64-NEXT:    j .LBB61_777
+; CHECK-RV64-NEXT:  .LBB61_267: # %else1030
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_268
+; CHECK-RV64-NEXT:    j .LBB61_778
+; CHECK-RV64-NEXT:  .LBB61_268: # %else1034
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_269
+; CHECK-RV64-NEXT:    j .LBB61_779
+; CHECK-RV64-NEXT:  .LBB61_269: # %else1038
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_270
+; CHECK-RV64-NEXT:    j .LBB61_780
+; CHECK-RV64-NEXT:  .LBB61_270: # %else1042
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_271
+; CHECK-RV64-NEXT:    j .LBB61_781
+; CHECK-RV64-NEXT:  .LBB61_271: # %else1046
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_272
+; CHECK-RV64-NEXT:    j .LBB61_782
+; CHECK-RV64-NEXT:  .LBB61_272: # %else1050
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_273
+; CHECK-RV64-NEXT:    j .LBB61_783
+; CHECK-RV64-NEXT:  .LBB61_273: # %else1054
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_274
+; CHECK-RV64-NEXT:    j .LBB61_784
+; CHECK-RV64-NEXT:  .LBB61_274: # %else1058
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_275
+; CHECK-RV64-NEXT:    j .LBB61_785
+; CHECK-RV64-NEXT:  .LBB61_275: # %else1062
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_276
+; CHECK-RV64-NEXT:    j .LBB61_786
+; CHECK-RV64-NEXT:  .LBB61_276: # %else1066
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_277
+; CHECK-RV64-NEXT:    j .LBB61_787
+; CHECK-RV64-NEXT:  .LBB61_277: # %else1070
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_278
+; CHECK-RV64-NEXT:    j .LBB61_788
+; CHECK-RV64-NEXT:  .LBB61_278: # %else1074
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_279
+; CHECK-RV64-NEXT:    j .LBB61_789
+; CHECK-RV64-NEXT:  .LBB61_279: # %else1078
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_280
+; CHECK-RV64-NEXT:    j .LBB61_790
+; CHECK-RV64-NEXT:  .LBB61_280: # %else1082
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_281
+; CHECK-RV64-NEXT:    j .LBB61_791
+; CHECK-RV64-NEXT:  .LBB61_281: # %else1086
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_282
+; CHECK-RV64-NEXT:    j .LBB61_792
+; CHECK-RV64-NEXT:  .LBB61_282: # %else1090
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_283
+; CHECK-RV64-NEXT:    j .LBB61_793
+; CHECK-RV64-NEXT:  .LBB61_283: # %else1094
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_284
+; CHECK-RV64-NEXT:    j .LBB61_794
+; CHECK-RV64-NEXT:  .LBB61_284: # %else1098
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_285
+; CHECK-RV64-NEXT:    j .LBB61_795
+; CHECK-RV64-NEXT:  .LBB61_285: # %else1102
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_286
+; CHECK-RV64-NEXT:    j .LBB61_796
+; CHECK-RV64-NEXT:  .LBB61_286: # %else1106
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_287
+; CHECK-RV64-NEXT:    j .LBB61_797
+; CHECK-RV64-NEXT:  .LBB61_287: # %else1110
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_288
+; CHECK-RV64-NEXT:    j .LBB61_798
+; CHECK-RV64-NEXT:  .LBB61_288: # %else1114
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_289
+; CHECK-RV64-NEXT:    j .LBB61_799
+; CHECK-RV64-NEXT:  .LBB61_289: # %else1118
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_290
+; CHECK-RV64-NEXT:    j .LBB61_800
+; CHECK-RV64-NEXT:  .LBB61_290: # %else1122
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_291
+; CHECK-RV64-NEXT:    j .LBB61_801
+; CHECK-RV64-NEXT:  .LBB61_291: # %else1126
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_292
+; CHECK-RV64-NEXT:    j .LBB61_802
+; CHECK-RV64-NEXT:  .LBB61_292: # %else1130
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_293
+; CHECK-RV64-NEXT:    j .LBB61_803
+; CHECK-RV64-NEXT:  .LBB61_293: # %else1134
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_294
+; CHECK-RV64-NEXT:    j .LBB61_804
+; CHECK-RV64-NEXT:  .LBB61_294: # %else1138
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_295
+; CHECK-RV64-NEXT:    j .LBB61_805
+; CHECK-RV64-NEXT:  .LBB61_295: # %else1142
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_296
+; CHECK-RV64-NEXT:    j .LBB61_806
+; CHECK-RV64-NEXT:  .LBB61_296: # %else1146
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_297
+; CHECK-RV64-NEXT:    j .LBB61_807
+; CHECK-RV64-NEXT:  .LBB61_297: # %else1150
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_298
+; CHECK-RV64-NEXT:    j .LBB61_808
+; CHECK-RV64-NEXT:  .LBB61_298: # %else1154
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_299
+; CHECK-RV64-NEXT:    j .LBB61_809
+; CHECK-RV64-NEXT:  .LBB61_299: # %else1158
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_300
+; CHECK-RV64-NEXT:    j .LBB61_810
+; CHECK-RV64-NEXT:  .LBB61_300: # %else1162
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_301
+; CHECK-RV64-NEXT:    j .LBB61_811
+; CHECK-RV64-NEXT:  .LBB61_301: # %else1166
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_302
+; CHECK-RV64-NEXT:    j .LBB61_812
+; CHECK-RV64-NEXT:  .LBB61_302: # %else1170
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_303
+; CHECK-RV64-NEXT:    j .LBB61_813
+; CHECK-RV64-NEXT:  .LBB61_303: # %else1174
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_304
+; CHECK-RV64-NEXT:    j .LBB61_814
+; CHECK-RV64-NEXT:  .LBB61_304: # %else1178
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_305
+; CHECK-RV64-NEXT:    j .LBB61_815
+; CHECK-RV64-NEXT:  .LBB61_305: # %else1182
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_306
+; CHECK-RV64-NEXT:    j .LBB61_816
+; CHECK-RV64-NEXT:  .LBB61_306: # %else1186
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_307
+; CHECK-RV64-NEXT:    j .LBB61_817
+; CHECK-RV64-NEXT:  .LBB61_307: # %else1190
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_308
+; CHECK-RV64-NEXT:    j .LBB61_818
+; CHECK-RV64-NEXT:  .LBB61_308: # %else1194
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_309
+; CHECK-RV64-NEXT:    j .LBB61_819
+; CHECK-RV64-NEXT:  .LBB61_309: # %else1198
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_310
+; CHECK-RV64-NEXT:    j .LBB61_820
+; CHECK-RV64-NEXT:  .LBB61_310: # %else1202
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_311
+; CHECK-RV64-NEXT:    j .LBB61_821
+; CHECK-RV64-NEXT:  .LBB61_311: # %else1206
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_312
+; CHECK-RV64-NEXT:    j .LBB61_822
+; CHECK-RV64-NEXT:  .LBB61_312: # %else1210
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_313
+; CHECK-RV64-NEXT:    j .LBB61_823
+; CHECK-RV64-NEXT:  .LBB61_313: # %else1214
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_314
+; CHECK-RV64-NEXT:    j .LBB61_824
+; CHECK-RV64-NEXT:  .LBB61_314: # %else1218
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_315
+; CHECK-RV64-NEXT:    j .LBB61_825
+; CHECK-RV64-NEXT:  .LBB61_315: # %else1222
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_316
+; CHECK-RV64-NEXT:    j .LBB61_826
+; CHECK-RV64-NEXT:  .LBB61_316: # %else1226
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_317
+; CHECK-RV64-NEXT:    j .LBB61_827
+; CHECK-RV64-NEXT:  .LBB61_317: # %else1230
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_318
+; CHECK-RV64-NEXT:    j .LBB61_828
+; CHECK-RV64-NEXT:  .LBB61_318: # %else1234
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_319
+; CHECK-RV64-NEXT:    j .LBB61_829
+; CHECK-RV64-NEXT:  .LBB61_319: # %else1238
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_320
+; CHECK-RV64-NEXT:    j .LBB61_830
+; CHECK-RV64-NEXT:  .LBB61_320: # %else1242
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_321
+; CHECK-RV64-NEXT:    j .LBB61_831
+; CHECK-RV64-NEXT:  .LBB61_321: # %else1246
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_322
+; CHECK-RV64-NEXT:    j .LBB61_832
+; CHECK-RV64-NEXT:  .LBB61_322: # %else1250
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_323
+; CHECK-RV64-NEXT:    j .LBB61_833
+; CHECK-RV64-NEXT:  .LBB61_323: # %else1254
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_324
+; CHECK-RV64-NEXT:    j .LBB61_834
+; CHECK-RV64-NEXT:  .LBB61_324: # %else1258
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_325
+; CHECK-RV64-NEXT:    j .LBB61_835
+; CHECK-RV64-NEXT:  .LBB61_325: # %else1262
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_327
+; CHECK-RV64-NEXT:  .LBB61_326: # %cond.load1265
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 318
+; CHECK-RV64-NEXT:    li a3, 317
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_327: # %else1266
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_329
+; CHECK-RV64-NEXT:  # %bb.328: # %cond.load1269
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    li a1, 319
+; CHECK-RV64-NEXT:    li a3, 318
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_329: # %else1270
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_330
+; CHECK-RV64-NEXT:    j .LBB61_836
+; CHECK-RV64-NEXT:  .LBB61_330: # %else1274
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_331
+; CHECK-RV64-NEXT:    j .LBB61_837
+; CHECK-RV64-NEXT:  .LBB61_331: # %else1278
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_332
+; CHECK-RV64-NEXT:    j .LBB61_838
+; CHECK-RV64-NEXT:  .LBB61_332: # %else1282
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_333
+; CHECK-RV64-NEXT:    j .LBB61_839
+; CHECK-RV64-NEXT:  .LBB61_333: # %else1286
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_334
+; CHECK-RV64-NEXT:    j .LBB61_840
+; CHECK-RV64-NEXT:  .LBB61_334: # %else1290
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_335
+; CHECK-RV64-NEXT:    j .LBB61_841
+; CHECK-RV64-NEXT:  .LBB61_335: # %else1294
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_336
+; CHECK-RV64-NEXT:    j .LBB61_842
+; CHECK-RV64-NEXT:  .LBB61_336: # %else1298
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_337
+; CHECK-RV64-NEXT:    j .LBB61_843
+; CHECK-RV64-NEXT:  .LBB61_337: # %else1302
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_338
+; CHECK-RV64-NEXT:    j .LBB61_844
+; CHECK-RV64-NEXT:  .LBB61_338: # %else1306
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_339
+; CHECK-RV64-NEXT:    j .LBB61_845
+; CHECK-RV64-NEXT:  .LBB61_339: # %else1310
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_340
+; CHECK-RV64-NEXT:    j .LBB61_846
+; CHECK-RV64-NEXT:  .LBB61_340: # %else1314
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_341
+; CHECK-RV64-NEXT:    j .LBB61_847
+; CHECK-RV64-NEXT:  .LBB61_341: # %else1318
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_342
+; CHECK-RV64-NEXT:    j .LBB61_848
+; CHECK-RV64-NEXT:  .LBB61_342: # %else1322
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_343
+; CHECK-RV64-NEXT:    j .LBB61_849
+; CHECK-RV64-NEXT:  .LBB61_343: # %else1326
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_344
+; CHECK-RV64-NEXT:    j .LBB61_850
+; CHECK-RV64-NEXT:  .LBB61_344: # %else1330
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_345
+; CHECK-RV64-NEXT:    j .LBB61_851
+; CHECK-RV64-NEXT:  .LBB61_345: # %else1334
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_346
+; CHECK-RV64-NEXT:    j .LBB61_852
+; CHECK-RV64-NEXT:  .LBB61_346: # %else1338
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_347
+; CHECK-RV64-NEXT:    j .LBB61_853
+; CHECK-RV64-NEXT:  .LBB61_347: # %else1342
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_348
+; CHECK-RV64-NEXT:    j .LBB61_854
+; CHECK-RV64-NEXT:  .LBB61_348: # %else1346
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_349
+; CHECK-RV64-NEXT:    j .LBB61_855
+; CHECK-RV64-NEXT:  .LBB61_349: # %else1350
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_350
+; CHECK-RV64-NEXT:    j .LBB61_856
+; CHECK-RV64-NEXT:  .LBB61_350: # %else1354
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_351
+; CHECK-RV64-NEXT:    j .LBB61_857
+; CHECK-RV64-NEXT:  .LBB61_351: # %else1358
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_352
+; CHECK-RV64-NEXT:    j .LBB61_858
+; CHECK-RV64-NEXT:  .LBB61_352: # %else1362
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_353
+; CHECK-RV64-NEXT:    j .LBB61_859
+; CHECK-RV64-NEXT:  .LBB61_353: # %else1366
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_354
+; CHECK-RV64-NEXT:    j .LBB61_860
+; CHECK-RV64-NEXT:  .LBB61_354: # %else1370
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_355
+; CHECK-RV64-NEXT:    j .LBB61_861
+; CHECK-RV64-NEXT:  .LBB61_355: # %else1374
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_356
+; CHECK-RV64-NEXT:    j .LBB61_862
+; CHECK-RV64-NEXT:  .LBB61_356: # %else1378
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_357
+; CHECK-RV64-NEXT:    j .LBB61_863
+; CHECK-RV64-NEXT:  .LBB61_357: # %else1382
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_358
+; CHECK-RV64-NEXT:    j .LBB61_864
+; CHECK-RV64-NEXT:  .LBB61_358: # %else1386
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_359
+; CHECK-RV64-NEXT:    j .LBB61_865
+; CHECK-RV64-NEXT:  .LBB61_359: # %else1390
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_360
+; CHECK-RV64-NEXT:    j .LBB61_866
+; CHECK-RV64-NEXT:  .LBB61_360: # %else1394
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_361
+; CHECK-RV64-NEXT:    j .LBB61_867
+; CHECK-RV64-NEXT:  .LBB61_361: # %else1398
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_362
+; CHECK-RV64-NEXT:    j .LBB61_868
+; CHECK-RV64-NEXT:  .LBB61_362: # %else1402
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_363
+; CHECK-RV64-NEXT:    j .LBB61_869
+; CHECK-RV64-NEXT:  .LBB61_363: # %else1406
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_364
+; CHECK-RV64-NEXT:    j .LBB61_870
+; CHECK-RV64-NEXT:  .LBB61_364: # %else1410
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_365
+; CHECK-RV64-NEXT:    j .LBB61_871
+; CHECK-RV64-NEXT:  .LBB61_365: # %else1414
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_366
+; CHECK-RV64-NEXT:    j .LBB61_872
+; CHECK-RV64-NEXT:  .LBB61_366: # %else1418
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_367
+; CHECK-RV64-NEXT:    j .LBB61_873
+; CHECK-RV64-NEXT:  .LBB61_367: # %else1422
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_368
+; CHECK-RV64-NEXT:    j .LBB61_874
+; CHECK-RV64-NEXT:  .LBB61_368: # %else1426
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_369
+; CHECK-RV64-NEXT:    j .LBB61_875
+; CHECK-RV64-NEXT:  .LBB61_369: # %else1430
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_370
+; CHECK-RV64-NEXT:    j .LBB61_876
+; CHECK-RV64-NEXT:  .LBB61_370: # %else1434
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_371
+; CHECK-RV64-NEXT:    j .LBB61_877
+; CHECK-RV64-NEXT:  .LBB61_371: # %else1438
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_372
+; CHECK-RV64-NEXT:    j .LBB61_878
+; CHECK-RV64-NEXT:  .LBB61_372: # %else1442
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_373
+; CHECK-RV64-NEXT:    j .LBB61_879
+; CHECK-RV64-NEXT:  .LBB61_373: # %else1446
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_374
+; CHECK-RV64-NEXT:    j .LBB61_880
+; CHECK-RV64-NEXT:  .LBB61_374: # %else1450
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_375
+; CHECK-RV64-NEXT:    j .LBB61_881
+; CHECK-RV64-NEXT:  .LBB61_375: # %else1454
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_376
+; CHECK-RV64-NEXT:    j .LBB61_882
+; CHECK-RV64-NEXT:  .LBB61_376: # %else1458
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_377
+; CHECK-RV64-NEXT:    j .LBB61_883
+; CHECK-RV64-NEXT:  .LBB61_377: # %else1462
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_378
+; CHECK-RV64-NEXT:    j .LBB61_884
+; CHECK-RV64-NEXT:  .LBB61_378: # %else1466
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_379
+; CHECK-RV64-NEXT:    j .LBB61_885
+; CHECK-RV64-NEXT:  .LBB61_379: # %else1470
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_380
+; CHECK-RV64-NEXT:    j .LBB61_886
+; CHECK-RV64-NEXT:  .LBB61_380: # %else1474
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_381
+; CHECK-RV64-NEXT:    j .LBB61_887
+; CHECK-RV64-NEXT:  .LBB61_381: # %else1478
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_382
+; CHECK-RV64-NEXT:    j .LBB61_888
+; CHECK-RV64-NEXT:  .LBB61_382: # %else1482
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_383
+; CHECK-RV64-NEXT:    j .LBB61_889
+; CHECK-RV64-NEXT:  .LBB61_383: # %else1486
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_384
+; CHECK-RV64-NEXT:    j .LBB61_890
+; CHECK-RV64-NEXT:  .LBB61_384: # %else1490
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_385
+; CHECK-RV64-NEXT:    j .LBB61_891
+; CHECK-RV64-NEXT:  .LBB61_385: # %else1494
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_386
+; CHECK-RV64-NEXT:    j .LBB61_892
+; CHECK-RV64-NEXT:  .LBB61_386: # %else1498
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_387
+; CHECK-RV64-NEXT:    j .LBB61_893
+; CHECK-RV64-NEXT:  .LBB61_387: # %else1502
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_388
+; CHECK-RV64-NEXT:    j .LBB61_894
+; CHECK-RV64-NEXT:  .LBB61_388: # %else1506
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_389
+; CHECK-RV64-NEXT:    j .LBB61_895
+; CHECK-RV64-NEXT:  .LBB61_389: # %else1510
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_390
+; CHECK-RV64-NEXT:    j .LBB61_896
+; CHECK-RV64-NEXT:  .LBB61_390: # %else1514
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_391
+; CHECK-RV64-NEXT:    j .LBB61_897
+; CHECK-RV64-NEXT:  .LBB61_391: # %else1518
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_393
+; CHECK-RV64-NEXT:  .LBB61_392: # %cond.load1521
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 382
+; CHECK-RV64-NEXT:    li a3, 381
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_393: # %else1522
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_395
+; CHECK-RV64-NEXT:  # %bb.394: # %cond.load1525
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    li a2, 383
+; CHECK-RV64-NEXT:    li a3, 382
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_395: # %else1526
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_396
+; CHECK-RV64-NEXT:    j .LBB61_898
+; CHECK-RV64-NEXT:  .LBB61_396: # %else1530
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_397
+; CHECK-RV64-NEXT:    j .LBB61_899
+; CHECK-RV64-NEXT:  .LBB61_397: # %else1534
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_398
+; CHECK-RV64-NEXT:    j .LBB61_900
+; CHECK-RV64-NEXT:  .LBB61_398: # %else1538
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_399
+; CHECK-RV64-NEXT:    j .LBB61_901
+; CHECK-RV64-NEXT:  .LBB61_399: # %else1542
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_400
+; CHECK-RV64-NEXT:    j .LBB61_902
+; CHECK-RV64-NEXT:  .LBB61_400: # %else1546
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_401
+; CHECK-RV64-NEXT:    j .LBB61_903
+; CHECK-RV64-NEXT:  .LBB61_401: # %else1550
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_402
+; CHECK-RV64-NEXT:    j .LBB61_904
+; CHECK-RV64-NEXT:  .LBB61_402: # %else1554
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_403
+; CHECK-RV64-NEXT:    j .LBB61_905
+; CHECK-RV64-NEXT:  .LBB61_403: # %else1558
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_404
+; CHECK-RV64-NEXT:    j .LBB61_906
+; CHECK-RV64-NEXT:  .LBB61_404: # %else1562
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_405
+; CHECK-RV64-NEXT:    j .LBB61_907
+; CHECK-RV64-NEXT:  .LBB61_405: # %else1566
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_406
+; CHECK-RV64-NEXT:    j .LBB61_908
+; CHECK-RV64-NEXT:  .LBB61_406: # %else1570
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_407
+; CHECK-RV64-NEXT:    j .LBB61_909
+; CHECK-RV64-NEXT:  .LBB61_407: # %else1574
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_408
+; CHECK-RV64-NEXT:    j .LBB61_910
+; CHECK-RV64-NEXT:  .LBB61_408: # %else1578
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_409
+; CHECK-RV64-NEXT:    j .LBB61_911
+; CHECK-RV64-NEXT:  .LBB61_409: # %else1582
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_410
+; CHECK-RV64-NEXT:    j .LBB61_912
+; CHECK-RV64-NEXT:  .LBB61_410: # %else1586
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_411
+; CHECK-RV64-NEXT:    j .LBB61_913
+; CHECK-RV64-NEXT:  .LBB61_411: # %else1590
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_412
+; CHECK-RV64-NEXT:    j .LBB61_914
+; CHECK-RV64-NEXT:  .LBB61_412: # %else1594
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_413
+; CHECK-RV64-NEXT:    j .LBB61_915
+; CHECK-RV64-NEXT:  .LBB61_413: # %else1598
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_414
+; CHECK-RV64-NEXT:    j .LBB61_916
+; CHECK-RV64-NEXT:  .LBB61_414: # %else1602
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_415
+; CHECK-RV64-NEXT:    j .LBB61_917
+; CHECK-RV64-NEXT:  .LBB61_415: # %else1606
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_416
+; CHECK-RV64-NEXT:    j .LBB61_918
+; CHECK-RV64-NEXT:  .LBB61_416: # %else1610
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_417
+; CHECK-RV64-NEXT:    j .LBB61_919
+; CHECK-RV64-NEXT:  .LBB61_417: # %else1614
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_418
+; CHECK-RV64-NEXT:    j .LBB61_920
+; CHECK-RV64-NEXT:  .LBB61_418: # %else1618
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_419
+; CHECK-RV64-NEXT:    j .LBB61_921
+; CHECK-RV64-NEXT:  .LBB61_419: # %else1622
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_420
+; CHECK-RV64-NEXT:    j .LBB61_922
+; CHECK-RV64-NEXT:  .LBB61_420: # %else1626
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_421
+; CHECK-RV64-NEXT:    j .LBB61_923
+; CHECK-RV64-NEXT:  .LBB61_421: # %else1630
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_422
+; CHECK-RV64-NEXT:    j .LBB61_924
+; CHECK-RV64-NEXT:  .LBB61_422: # %else1634
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_423
+; CHECK-RV64-NEXT:    j .LBB61_925
+; CHECK-RV64-NEXT:  .LBB61_423: # %else1638
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_424
+; CHECK-RV64-NEXT:    j .LBB61_926
+; CHECK-RV64-NEXT:  .LBB61_424: # %else1642
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_425
+; CHECK-RV64-NEXT:    j .LBB61_927
+; CHECK-RV64-NEXT:  .LBB61_425: # %else1646
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_426
+; CHECK-RV64-NEXT:    j .LBB61_928
+; CHECK-RV64-NEXT:  .LBB61_426: # %else1650
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_427
+; CHECK-RV64-NEXT:    j .LBB61_929
+; CHECK-RV64-NEXT:  .LBB61_427: # %else1654
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_428
+; CHECK-RV64-NEXT:    j .LBB61_930
+; CHECK-RV64-NEXT:  .LBB61_428: # %else1658
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_429
+; CHECK-RV64-NEXT:    j .LBB61_931
+; CHECK-RV64-NEXT:  .LBB61_429: # %else1662
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_430
+; CHECK-RV64-NEXT:    j .LBB61_932
+; CHECK-RV64-NEXT:  .LBB61_430: # %else1666
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_431
+; CHECK-RV64-NEXT:    j .LBB61_933
+; CHECK-RV64-NEXT:  .LBB61_431: # %else1670
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_432
+; CHECK-RV64-NEXT:    j .LBB61_934
+; CHECK-RV64-NEXT:  .LBB61_432: # %else1674
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_433
+; CHECK-RV64-NEXT:    j .LBB61_935
+; CHECK-RV64-NEXT:  .LBB61_433: # %else1678
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_434
+; CHECK-RV64-NEXT:    j .LBB61_936
+; CHECK-RV64-NEXT:  .LBB61_434: # %else1682
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_435
+; CHECK-RV64-NEXT:    j .LBB61_937
+; CHECK-RV64-NEXT:  .LBB61_435: # %else1686
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_436
+; CHECK-RV64-NEXT:    j .LBB61_938
+; CHECK-RV64-NEXT:  .LBB61_436: # %else1690
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_437
+; CHECK-RV64-NEXT:    j .LBB61_939
+; CHECK-RV64-NEXT:  .LBB61_437: # %else1694
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_438
+; CHECK-RV64-NEXT:    j .LBB61_940
+; CHECK-RV64-NEXT:  .LBB61_438: # %else1698
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_439
+; CHECK-RV64-NEXT:    j .LBB61_941
+; CHECK-RV64-NEXT:  .LBB61_439: # %else1702
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_440
+; CHECK-RV64-NEXT:    j .LBB61_942
+; CHECK-RV64-NEXT:  .LBB61_440: # %else1706
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_441
+; CHECK-RV64-NEXT:    j .LBB61_943
+; CHECK-RV64-NEXT:  .LBB61_441: # %else1710
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_442
+; CHECK-RV64-NEXT:    j .LBB61_944
+; CHECK-RV64-NEXT:  .LBB61_442: # %else1714
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_443
+; CHECK-RV64-NEXT:    j .LBB61_945
+; CHECK-RV64-NEXT:  .LBB61_443: # %else1718
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_444
+; CHECK-RV64-NEXT:    j .LBB61_946
+; CHECK-RV64-NEXT:  .LBB61_444: # %else1722
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_445
+; CHECK-RV64-NEXT:    j .LBB61_947
+; CHECK-RV64-NEXT:  .LBB61_445: # %else1726
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_446
+; CHECK-RV64-NEXT:    j .LBB61_948
+; CHECK-RV64-NEXT:  .LBB61_446: # %else1730
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_447
+; CHECK-RV64-NEXT:    j .LBB61_949
+; CHECK-RV64-NEXT:  .LBB61_447: # %else1734
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_448
+; CHECK-RV64-NEXT:    j .LBB61_950
+; CHECK-RV64-NEXT:  .LBB61_448: # %else1738
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_449
+; CHECK-RV64-NEXT:    j .LBB61_951
+; CHECK-RV64-NEXT:  .LBB61_449: # %else1742
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_450
+; CHECK-RV64-NEXT:    j .LBB61_952
+; CHECK-RV64-NEXT:  .LBB61_450: # %else1746
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_451
+; CHECK-RV64-NEXT:    j .LBB61_953
+; CHECK-RV64-NEXT:  .LBB61_451: # %else1750
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_452
+; CHECK-RV64-NEXT:    j .LBB61_954
+; CHECK-RV64-NEXT:  .LBB61_452: # %else1754
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_453
+; CHECK-RV64-NEXT:    j .LBB61_955
+; CHECK-RV64-NEXT:  .LBB61_453: # %else1758
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_454
+; CHECK-RV64-NEXT:    j .LBB61_956
+; CHECK-RV64-NEXT:  .LBB61_454: # %else1762
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_455
+; CHECK-RV64-NEXT:    j .LBB61_957
+; CHECK-RV64-NEXT:  .LBB61_455: # %else1766
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_456
+; CHECK-RV64-NEXT:    j .LBB61_958
+; CHECK-RV64-NEXT:  .LBB61_456: # %else1770
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_457
+; CHECK-RV64-NEXT:    j .LBB61_959
+; CHECK-RV64-NEXT:  .LBB61_457: # %else1774
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_459
+; CHECK-RV64-NEXT:  .LBB61_458: # %cond.load1777
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 446
+; CHECK-RV64-NEXT:    li a3, 445
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_459: # %else1778
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_461
+; CHECK-RV64-NEXT:  # %bb.460: # %cond.load1781
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    li a1, 447
+; CHECK-RV64-NEXT:    li a3, 446
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_461: # %else1782
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_462
+; CHECK-RV64-NEXT:    j .LBB61_960
+; CHECK-RV64-NEXT:  .LBB61_462: # %else1786
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_463
+; CHECK-RV64-NEXT:    j .LBB61_961
+; CHECK-RV64-NEXT:  .LBB61_463: # %else1790
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_464
+; CHECK-RV64-NEXT:    j .LBB61_962
+; CHECK-RV64-NEXT:  .LBB61_464: # %else1794
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_465
+; CHECK-RV64-NEXT:    j .LBB61_963
+; CHECK-RV64-NEXT:  .LBB61_465: # %else1798
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_466
+; CHECK-RV64-NEXT:    j .LBB61_964
+; CHECK-RV64-NEXT:  .LBB61_466: # %else1802
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_467
+; CHECK-RV64-NEXT:    j .LBB61_965
+; CHECK-RV64-NEXT:  .LBB61_467: # %else1806
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_468
+; CHECK-RV64-NEXT:    j .LBB61_966
+; CHECK-RV64-NEXT:  .LBB61_468: # %else1810
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_469
+; CHECK-RV64-NEXT:    j .LBB61_967
+; CHECK-RV64-NEXT:  .LBB61_469: # %else1814
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_470
+; CHECK-RV64-NEXT:    j .LBB61_968
+; CHECK-RV64-NEXT:  .LBB61_470: # %else1818
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_471
+; CHECK-RV64-NEXT:    j .LBB61_969
+; CHECK-RV64-NEXT:  .LBB61_471: # %else1822
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_472
+; CHECK-RV64-NEXT:    j .LBB61_970
+; CHECK-RV64-NEXT:  .LBB61_472: # %else1826
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_473
+; CHECK-RV64-NEXT:    j .LBB61_971
+; CHECK-RV64-NEXT:  .LBB61_473: # %else1830
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_474
+; CHECK-RV64-NEXT:    j .LBB61_972
+; CHECK-RV64-NEXT:  .LBB61_474: # %else1834
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_475
+; CHECK-RV64-NEXT:    j .LBB61_973
+; CHECK-RV64-NEXT:  .LBB61_475: # %else1838
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_476
+; CHECK-RV64-NEXT:    j .LBB61_974
+; CHECK-RV64-NEXT:  .LBB61_476: # %else1842
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_477
+; CHECK-RV64-NEXT:    j .LBB61_975
+; CHECK-RV64-NEXT:  .LBB61_477: # %else1846
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_478
+; CHECK-RV64-NEXT:    j .LBB61_976
+; CHECK-RV64-NEXT:  .LBB61_478: # %else1850
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_479
+; CHECK-RV64-NEXT:    j .LBB61_977
+; CHECK-RV64-NEXT:  .LBB61_479: # %else1854
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_480
+; CHECK-RV64-NEXT:    j .LBB61_978
+; CHECK-RV64-NEXT:  .LBB61_480: # %else1858
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_481
+; CHECK-RV64-NEXT:    j .LBB61_979
+; CHECK-RV64-NEXT:  .LBB61_481: # %else1862
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_482
+; CHECK-RV64-NEXT:    j .LBB61_980
+; CHECK-RV64-NEXT:  .LBB61_482: # %else1866
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_483
+; CHECK-RV64-NEXT:    j .LBB61_981
+; CHECK-RV64-NEXT:  .LBB61_483: # %else1870
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_484
+; CHECK-RV64-NEXT:    j .LBB61_982
+; CHECK-RV64-NEXT:  .LBB61_484: # %else1874
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_485
+; CHECK-RV64-NEXT:    j .LBB61_983
+; CHECK-RV64-NEXT:  .LBB61_485: # %else1878
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_486
+; CHECK-RV64-NEXT:    j .LBB61_984
+; CHECK-RV64-NEXT:  .LBB61_486: # %else1882
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_487
+; CHECK-RV64-NEXT:    j .LBB61_985
+; CHECK-RV64-NEXT:  .LBB61_487: # %else1886
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_488
+; CHECK-RV64-NEXT:    j .LBB61_986
+; CHECK-RV64-NEXT:  .LBB61_488: # %else1890
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_489
+; CHECK-RV64-NEXT:    j .LBB61_987
+; CHECK-RV64-NEXT:  .LBB61_489: # %else1894
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_490
+; CHECK-RV64-NEXT:    j .LBB61_988
+; CHECK-RV64-NEXT:  .LBB61_490: # %else1898
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_491
+; CHECK-RV64-NEXT:    j .LBB61_989
+; CHECK-RV64-NEXT:  .LBB61_491: # %else1902
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_492
+; CHECK-RV64-NEXT:    j .LBB61_990
+; CHECK-RV64-NEXT:  .LBB61_492: # %else1906
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_493
+; CHECK-RV64-NEXT:    j .LBB61_991
+; CHECK-RV64-NEXT:  .LBB61_493: # %else1910
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_494
+; CHECK-RV64-NEXT:    j .LBB61_992
+; CHECK-RV64-NEXT:  .LBB61_494: # %else1914
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_495
+; CHECK-RV64-NEXT:    j .LBB61_993
+; CHECK-RV64-NEXT:  .LBB61_495: # %else1918
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_496
+; CHECK-RV64-NEXT:    j .LBB61_994
+; CHECK-RV64-NEXT:  .LBB61_496: # %else1922
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_497
+; CHECK-RV64-NEXT:    j .LBB61_995
+; CHECK-RV64-NEXT:  .LBB61_497: # %else1926
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_498
+; CHECK-RV64-NEXT:    j .LBB61_996
+; CHECK-RV64-NEXT:  .LBB61_498: # %else1930
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_499
+; CHECK-RV64-NEXT:    j .LBB61_997
+; CHECK-RV64-NEXT:  .LBB61_499: # %else1934
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_500
+; CHECK-RV64-NEXT:    j .LBB61_998
+; CHECK-RV64-NEXT:  .LBB61_500: # %else1938
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_501
+; CHECK-RV64-NEXT:    j .LBB61_999
+; CHECK-RV64-NEXT:  .LBB61_501: # %else1942
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_502
+; CHECK-RV64-NEXT:    j .LBB61_1000
+; CHECK-RV64-NEXT:  .LBB61_502: # %else1946
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_503
+; CHECK-RV64-NEXT:    j .LBB61_1001
+; CHECK-RV64-NEXT:  .LBB61_503: # %else1950
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_504
+; CHECK-RV64-NEXT:    j .LBB61_1002
+; CHECK-RV64-NEXT:  .LBB61_504: # %else1954
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_505
+; CHECK-RV64-NEXT:    j .LBB61_1003
+; CHECK-RV64-NEXT:  .LBB61_505: # %else1958
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_506
+; CHECK-RV64-NEXT:    j .LBB61_1004
+; CHECK-RV64-NEXT:  .LBB61_506: # %else1962
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_507
+; CHECK-RV64-NEXT:    j .LBB61_1005
+; CHECK-RV64-NEXT:  .LBB61_507: # %else1966
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_508
+; CHECK-RV64-NEXT:    j .LBB61_1006
+; CHECK-RV64-NEXT:  .LBB61_508: # %else1970
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_509
+; CHECK-RV64-NEXT:    j .LBB61_1007
+; CHECK-RV64-NEXT:  .LBB61_509: # %else1974
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_510
+; CHECK-RV64-NEXT:    j .LBB61_1008
+; CHECK-RV64-NEXT:  .LBB61_510: # %else1978
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_511
+; CHECK-RV64-NEXT:    j .LBB61_1009
+; CHECK-RV64-NEXT:  .LBB61_511: # %else1982
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_512
+; CHECK-RV64-NEXT:    j .LBB61_1010
+; CHECK-RV64-NEXT:  .LBB61_512: # %else1986
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_513
+; CHECK-RV64-NEXT:    j .LBB61_1011
+; CHECK-RV64-NEXT:  .LBB61_513: # %else1990
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_514
+; CHECK-RV64-NEXT:    j .LBB61_1012
+; CHECK-RV64-NEXT:  .LBB61_514: # %else1994
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_515
+; CHECK-RV64-NEXT:    j .LBB61_1013
+; CHECK-RV64-NEXT:  .LBB61_515: # %else1998
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_516
+; CHECK-RV64-NEXT:    j .LBB61_1014
+; CHECK-RV64-NEXT:  .LBB61_516: # %else2002
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_517
+; CHECK-RV64-NEXT:    j .LBB61_1015
+; CHECK-RV64-NEXT:  .LBB61_517: # %else2006
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_518
+; CHECK-RV64-NEXT:    j .LBB61_1016
+; CHECK-RV64-NEXT:  .LBB61_518: # %else2010
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_519
+; CHECK-RV64-NEXT:    j .LBB61_1017
+; CHECK-RV64-NEXT:  .LBB61_519: # %else2014
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_520
+; CHECK-RV64-NEXT:    j .LBB61_1018
+; CHECK-RV64-NEXT:  .LBB61_520: # %else2018
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_521
+; CHECK-RV64-NEXT:    j .LBB61_1019
+; CHECK-RV64-NEXT:  .LBB61_521: # %else2022
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_522
+; CHECK-RV64-NEXT:    j .LBB61_1020
+; CHECK-RV64-NEXT:  .LBB61_522: # %else2026
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_523
+; CHECK-RV64-NEXT:    j .LBB61_1021
+; CHECK-RV64-NEXT:  .LBB61_523: # %else2030
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_524
+; CHECK-RV64-NEXT:    j .LBB61_1022
+; CHECK-RV64-NEXT:  .LBB61_524: # %else2034
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_525
+; CHECK-RV64-NEXT:    j .LBB61_1023
+; CHECK-RV64-NEXT:  .LBB61_525: # %else2038
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_526
+; CHECK-RV64-NEXT:    j .LBB61_1024
+; CHECK-RV64-NEXT:  .LBB61_526: # %else2042
+; CHECK-RV64-NEXT:    ret
+; CHECK-RV64-NEXT:  .LBB61_527: # %cond.load
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v8, a1
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_528
+; CHECK-RV64-NEXT:    j .LBB61_2
+; CHECK-RV64-NEXT:  .LBB61_528: # %cond.load1
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_529
+; CHECK-RV64-NEXT:    j .LBB61_3
+; CHECK-RV64-NEXT:  .LBB61_529: # %cond.load5
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_530
+; CHECK-RV64-NEXT:    j .LBB61_4
+; CHECK-RV64-NEXT:  .LBB61_530: # %cond.load9
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_531
+; CHECK-RV64-NEXT:    j .LBB61_5
+; CHECK-RV64-NEXT:  .LBB61_531: # %cond.load13
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_532
+; CHECK-RV64-NEXT:    j .LBB61_6
+; CHECK-RV64-NEXT:  .LBB61_532: # %cond.load17
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_533
+; CHECK-RV64-NEXT:    j .LBB61_7
+; CHECK-RV64-NEXT:  .LBB61_533: # %cond.load21
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_534
+; CHECK-RV64-NEXT:    j .LBB61_8
+; CHECK-RV64-NEXT:  .LBB61_534: # %cond.load25
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_535
+; CHECK-RV64-NEXT:    j .LBB61_9
+; CHECK-RV64-NEXT:  .LBB61_535: # %cond.load29
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_536
+; CHECK-RV64-NEXT:    j .LBB61_10
+; CHECK-RV64-NEXT:  .LBB61_536: # %cond.load33
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_537
+; CHECK-RV64-NEXT:    j .LBB61_11
+; CHECK-RV64-NEXT:  .LBB61_537: # %cond.load37
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_538
+; CHECK-RV64-NEXT:    j .LBB61_12
+; CHECK-RV64-NEXT:  .LBB61_538: # %cond.load41
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_539
+; CHECK-RV64-NEXT:    j .LBB61_13
+; CHECK-RV64-NEXT:  .LBB61_539: # %cond.load45
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_540
+; CHECK-RV64-NEXT:    j .LBB61_14
+; CHECK-RV64-NEXT:  .LBB61_540: # %cond.load49
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_541
+; CHECK-RV64-NEXT:    j .LBB61_15
+; CHECK-RV64-NEXT:  .LBB61_541: # %cond.load53
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_542
+; CHECK-RV64-NEXT:    j .LBB61_16
+; CHECK-RV64-NEXT:  .LBB61_542: # %cond.load57
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_543
+; CHECK-RV64-NEXT:    j .LBB61_17
+; CHECK-RV64-NEXT:  .LBB61_543: # %cond.load61
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_544
+; CHECK-RV64-NEXT:    j .LBB61_18
+; CHECK-RV64-NEXT:  .LBB61_544: # %cond.load65
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_545
+; CHECK-RV64-NEXT:    j .LBB61_19
+; CHECK-RV64-NEXT:  .LBB61_545: # %cond.load69
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_546
+; CHECK-RV64-NEXT:    j .LBB61_20
+; CHECK-RV64-NEXT:  .LBB61_546: # %cond.load73
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_547
+; CHECK-RV64-NEXT:    j .LBB61_21
+; CHECK-RV64-NEXT:  .LBB61_547: # %cond.load77
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_548
+; CHECK-RV64-NEXT:    j .LBB61_22
+; CHECK-RV64-NEXT:  .LBB61_548: # %cond.load81
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_549
+; CHECK-RV64-NEXT:    j .LBB61_23
+; CHECK-RV64-NEXT:  .LBB61_549: # %cond.load85
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_550
+; CHECK-RV64-NEXT:    j .LBB61_24
+; CHECK-RV64-NEXT:  .LBB61_550: # %cond.load89
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_551
+; CHECK-RV64-NEXT:    j .LBB61_25
+; CHECK-RV64-NEXT:  .LBB61_551: # %cond.load93
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_552
+; CHECK-RV64-NEXT:    j .LBB61_26
+; CHECK-RV64-NEXT:  .LBB61_552: # %cond.load97
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_553
+; CHECK-RV64-NEXT:    j .LBB61_27
+; CHECK-RV64-NEXT:  .LBB61_553: # %cond.load101
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_554
+; CHECK-RV64-NEXT:    j .LBB61_28
+; CHECK-RV64-NEXT:  .LBB61_554: # %cond.load105
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_555
+; CHECK-RV64-NEXT:    j .LBB61_29
+; CHECK-RV64-NEXT:  .LBB61_555: # %cond.load109
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_556
+; CHECK-RV64-NEXT:    j .LBB61_30
+; CHECK-RV64-NEXT:  .LBB61_556: # %cond.load113
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_557
+; CHECK-RV64-NEXT:    j .LBB61_31
+; CHECK-RV64-NEXT:  .LBB61_557: # %cond.load117
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 30
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_558
+; CHECK-RV64-NEXT:    j .LBB61_32
+; CHECK-RV64-NEXT:  .LBB61_558: # %cond.load121
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_559
+; CHECK-RV64-NEXT:    j .LBB61_33
+; CHECK-RV64-NEXT:  .LBB61_559: # %cond.load125
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 33
+; CHECK-RV64-NEXT:    li a3, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_560
+; CHECK-RV64-NEXT:    j .LBB61_34
+; CHECK-RV64-NEXT:  .LBB61_560: # %cond.load129
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 34
+; CHECK-RV64-NEXT:    li a3, 33
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_561
+; CHECK-RV64-NEXT:    j .LBB61_35
+; CHECK-RV64-NEXT:  .LBB61_561: # %cond.load133
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 35
+; CHECK-RV64-NEXT:    li a3, 34
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_562
+; CHECK-RV64-NEXT:    j .LBB61_36
+; CHECK-RV64-NEXT:  .LBB61_562: # %cond.load137
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 36
+; CHECK-RV64-NEXT:    li a3, 35
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_563
+; CHECK-RV64-NEXT:    j .LBB61_37
+; CHECK-RV64-NEXT:  .LBB61_563: # %cond.load141
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 37
+; CHECK-RV64-NEXT:    li a3, 36
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_564
+; CHECK-RV64-NEXT:    j .LBB61_38
+; CHECK-RV64-NEXT:  .LBB61_564: # %cond.load145
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 38
+; CHECK-RV64-NEXT:    li a3, 37
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_565
+; CHECK-RV64-NEXT:    j .LBB61_39
+; CHECK-RV64-NEXT:  .LBB61_565: # %cond.load149
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 39
+; CHECK-RV64-NEXT:    li a3, 38
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_566
+; CHECK-RV64-NEXT:    j .LBB61_40
+; CHECK-RV64-NEXT:  .LBB61_566: # %cond.load153
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 40
+; CHECK-RV64-NEXT:    li a3, 39
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_567
+; CHECK-RV64-NEXT:    j .LBB61_41
+; CHECK-RV64-NEXT:  .LBB61_567: # %cond.load157
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 41
+; CHECK-RV64-NEXT:    li a3, 40
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_568
+; CHECK-RV64-NEXT:    j .LBB61_42
+; CHECK-RV64-NEXT:  .LBB61_568: # %cond.load161
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 42
+; CHECK-RV64-NEXT:    li a3, 41
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_569
+; CHECK-RV64-NEXT:    j .LBB61_43
+; CHECK-RV64-NEXT:  .LBB61_569: # %cond.load165
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 43
+; CHECK-RV64-NEXT:    li a3, 42
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_570
+; CHECK-RV64-NEXT:    j .LBB61_44
+; CHECK-RV64-NEXT:  .LBB61_570: # %cond.load169
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 44
+; CHECK-RV64-NEXT:    li a3, 43
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_571
+; CHECK-RV64-NEXT:    j .LBB61_45
+; CHECK-RV64-NEXT:  .LBB61_571: # %cond.load173
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 45
+; CHECK-RV64-NEXT:    li a3, 44
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_572
+; CHECK-RV64-NEXT:    j .LBB61_46
+; CHECK-RV64-NEXT:  .LBB61_572: # %cond.load177
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 46
+; CHECK-RV64-NEXT:    li a3, 45
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_573
+; CHECK-RV64-NEXT:    j .LBB61_47
+; CHECK-RV64-NEXT:  .LBB61_573: # %cond.load181
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 47
+; CHECK-RV64-NEXT:    li a3, 46
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_574
+; CHECK-RV64-NEXT:    j .LBB61_48
+; CHECK-RV64-NEXT:  .LBB61_574: # %cond.load185
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 48
+; CHECK-RV64-NEXT:    li a3, 47
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_575
+; CHECK-RV64-NEXT:    j .LBB61_49
+; CHECK-RV64-NEXT:  .LBB61_575: # %cond.load189
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 49
+; CHECK-RV64-NEXT:    li a3, 48
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_576
+; CHECK-RV64-NEXT:    j .LBB61_50
+; CHECK-RV64-NEXT:  .LBB61_576: # %cond.load193
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 50
+; CHECK-RV64-NEXT:    li a3, 49
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_577
+; CHECK-RV64-NEXT:    j .LBB61_51
+; CHECK-RV64-NEXT:  .LBB61_577: # %cond.load197
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 51
+; CHECK-RV64-NEXT:    li a3, 50
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_578
+; CHECK-RV64-NEXT:    j .LBB61_52
+; CHECK-RV64-NEXT:  .LBB61_578: # %cond.load201
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 52
+; CHECK-RV64-NEXT:    li a3, 51
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_579
+; CHECK-RV64-NEXT:    j .LBB61_53
+; CHECK-RV64-NEXT:  .LBB61_579: # %cond.load205
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 53
+; CHECK-RV64-NEXT:    li a3, 52
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_580
+; CHECK-RV64-NEXT:    j .LBB61_54
+; CHECK-RV64-NEXT:  .LBB61_580: # %cond.load209
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 54
+; CHECK-RV64-NEXT:    li a3, 53
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_581
+; CHECK-RV64-NEXT:    j .LBB61_55
+; CHECK-RV64-NEXT:  .LBB61_581: # %cond.load213
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 55
+; CHECK-RV64-NEXT:    li a3, 54
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_582
+; CHECK-RV64-NEXT:    j .LBB61_56
+; CHECK-RV64-NEXT:  .LBB61_582: # %cond.load217
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 56
+; CHECK-RV64-NEXT:    li a3, 55
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_583
+; CHECK-RV64-NEXT:    j .LBB61_57
+; CHECK-RV64-NEXT:  .LBB61_583: # %cond.load221
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 57
+; CHECK-RV64-NEXT:    li a3, 56
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_584
+; CHECK-RV64-NEXT:    j .LBB61_58
+; CHECK-RV64-NEXT:  .LBB61_584: # %cond.load225
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 58
+; CHECK-RV64-NEXT:    li a3, 57
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_585
+; CHECK-RV64-NEXT:    j .LBB61_59
+; CHECK-RV64-NEXT:  .LBB61_585: # %cond.load229
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 59
+; CHECK-RV64-NEXT:    li a3, 58
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_586
+; CHECK-RV64-NEXT:    j .LBB61_60
+; CHECK-RV64-NEXT:  .LBB61_586: # %cond.load233
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 60
+; CHECK-RV64-NEXT:    li a3, 59
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_587
+; CHECK-RV64-NEXT:    j .LBB61_61
+; CHECK-RV64-NEXT:  .LBB61_587: # %cond.load237
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 61
+; CHECK-RV64-NEXT:    li a3, 60
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1025
+; CHECK-RV64-NEXT:    j .LBB61_62
+; CHECK-RV64-NEXT:  .LBB61_1025: # %cond.load237
+; CHECK-RV64-NEXT:    j .LBB61_63
+; CHECK-RV64-NEXT:  .LBB61_588: # %cond.load249
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 64
+; CHECK-RV64-NEXT:    li a3, 63
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_589
+; CHECK-RV64-NEXT:    j .LBB61_67
+; CHECK-RV64-NEXT:  .LBB61_589: # %cond.load253
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 65
+; CHECK-RV64-NEXT:    li a3, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_590
+; CHECK-RV64-NEXT:    j .LBB61_68
+; CHECK-RV64-NEXT:  .LBB61_590: # %cond.load257
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 66
+; CHECK-RV64-NEXT:    li a3, 65
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_591
+; CHECK-RV64-NEXT:    j .LBB61_69
+; CHECK-RV64-NEXT:  .LBB61_591: # %cond.load261
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 67
+; CHECK-RV64-NEXT:    li a3, 66
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_592
+; CHECK-RV64-NEXT:    j .LBB61_70
+; CHECK-RV64-NEXT:  .LBB61_592: # %cond.load265
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 68
+; CHECK-RV64-NEXT:    li a3, 67
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_593
+; CHECK-RV64-NEXT:    j .LBB61_71
+; CHECK-RV64-NEXT:  .LBB61_593: # %cond.load269
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 69
+; CHECK-RV64-NEXT:    li a3, 68
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_594
+; CHECK-RV64-NEXT:    j .LBB61_72
+; CHECK-RV64-NEXT:  .LBB61_594: # %cond.load273
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 70
+; CHECK-RV64-NEXT:    li a3, 69
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_595
+; CHECK-RV64-NEXT:    j .LBB61_73
+; CHECK-RV64-NEXT:  .LBB61_595: # %cond.load277
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 71
+; CHECK-RV64-NEXT:    li a3, 70
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_596
+; CHECK-RV64-NEXT:    j .LBB61_74
+; CHECK-RV64-NEXT:  .LBB61_596: # %cond.load281
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 72
+; CHECK-RV64-NEXT:    li a3, 71
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_597
+; CHECK-RV64-NEXT:    j .LBB61_75
+; CHECK-RV64-NEXT:  .LBB61_597: # %cond.load285
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 73
+; CHECK-RV64-NEXT:    li a3, 72
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_598
+; CHECK-RV64-NEXT:    j .LBB61_76
+; CHECK-RV64-NEXT:  .LBB61_598: # %cond.load289
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 74
+; CHECK-RV64-NEXT:    li a3, 73
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_599
+; CHECK-RV64-NEXT:    j .LBB61_77
+; CHECK-RV64-NEXT:  .LBB61_599: # %cond.load293
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 75
+; CHECK-RV64-NEXT:    li a3, 74
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_600
+; CHECK-RV64-NEXT:    j .LBB61_78
+; CHECK-RV64-NEXT:  .LBB61_600: # %cond.load297
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 76
+; CHECK-RV64-NEXT:    li a3, 75
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_601
+; CHECK-RV64-NEXT:    j .LBB61_79
+; CHECK-RV64-NEXT:  .LBB61_601: # %cond.load301
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 77
+; CHECK-RV64-NEXT:    li a3, 76
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_602
+; CHECK-RV64-NEXT:    j .LBB61_80
+; CHECK-RV64-NEXT:  .LBB61_602: # %cond.load305
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 78
+; CHECK-RV64-NEXT:    li a3, 77
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_603
+; CHECK-RV64-NEXT:    j .LBB61_81
+; CHECK-RV64-NEXT:  .LBB61_603: # %cond.load309
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 79
+; CHECK-RV64-NEXT:    li a3, 78
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_604
+; CHECK-RV64-NEXT:    j .LBB61_82
+; CHECK-RV64-NEXT:  .LBB61_604: # %cond.load313
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 80
+; CHECK-RV64-NEXT:    li a3, 79
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_605
+; CHECK-RV64-NEXT:    j .LBB61_83
+; CHECK-RV64-NEXT:  .LBB61_605: # %cond.load317
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 81
+; CHECK-RV64-NEXT:    li a3, 80
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_606
+; CHECK-RV64-NEXT:    j .LBB61_84
+; CHECK-RV64-NEXT:  .LBB61_606: # %cond.load321
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 82
+; CHECK-RV64-NEXT:    li a3, 81
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_607
+; CHECK-RV64-NEXT:    j .LBB61_85
+; CHECK-RV64-NEXT:  .LBB61_607: # %cond.load325
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 83
+; CHECK-RV64-NEXT:    li a3, 82
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_608
+; CHECK-RV64-NEXT:    j .LBB61_86
+; CHECK-RV64-NEXT:  .LBB61_608: # %cond.load329
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 84
+; CHECK-RV64-NEXT:    li a3, 83
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_609
+; CHECK-RV64-NEXT:    j .LBB61_87
+; CHECK-RV64-NEXT:  .LBB61_609: # %cond.load333
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 85
+; CHECK-RV64-NEXT:    li a3, 84
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_610
+; CHECK-RV64-NEXT:    j .LBB61_88
+; CHECK-RV64-NEXT:  .LBB61_610: # %cond.load337
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 86
+; CHECK-RV64-NEXT:    li a3, 85
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_611
+; CHECK-RV64-NEXT:    j .LBB61_89
+; CHECK-RV64-NEXT:  .LBB61_611: # %cond.load341
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 87
+; CHECK-RV64-NEXT:    li a3, 86
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_612
+; CHECK-RV64-NEXT:    j .LBB61_90
+; CHECK-RV64-NEXT:  .LBB61_612: # %cond.load345
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 88
+; CHECK-RV64-NEXT:    li a3, 87
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_613
+; CHECK-RV64-NEXT:    j .LBB61_91
+; CHECK-RV64-NEXT:  .LBB61_613: # %cond.load349
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 89
+; CHECK-RV64-NEXT:    li a3, 88
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_614
+; CHECK-RV64-NEXT:    j .LBB61_92
+; CHECK-RV64-NEXT:  .LBB61_614: # %cond.load353
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 90
+; CHECK-RV64-NEXT:    li a3, 89
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_615
+; CHECK-RV64-NEXT:    j .LBB61_93
+; CHECK-RV64-NEXT:  .LBB61_615: # %cond.load357
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 91
+; CHECK-RV64-NEXT:    li a3, 90
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_616
+; CHECK-RV64-NEXT:    j .LBB61_94
+; CHECK-RV64-NEXT:  .LBB61_616: # %cond.load361
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 92
+; CHECK-RV64-NEXT:    li a3, 91
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_617
+; CHECK-RV64-NEXT:    j .LBB61_95
+; CHECK-RV64-NEXT:  .LBB61_617: # %cond.load365
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 93
+; CHECK-RV64-NEXT:    li a3, 92
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_618
+; CHECK-RV64-NEXT:    j .LBB61_96
+; CHECK-RV64-NEXT:  .LBB61_618: # %cond.load369
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 94
+; CHECK-RV64-NEXT:    li a3, 93
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_619
+; CHECK-RV64-NEXT:    j .LBB61_97
+; CHECK-RV64-NEXT:  .LBB61_619: # %cond.load373
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 95
+; CHECK-RV64-NEXT:    li a3, 94
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_620
+; CHECK-RV64-NEXT:    j .LBB61_98
+; CHECK-RV64-NEXT:  .LBB61_620: # %cond.load377
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 96
+; CHECK-RV64-NEXT:    li a3, 95
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_621
+; CHECK-RV64-NEXT:    j .LBB61_99
+; CHECK-RV64-NEXT:  .LBB61_621: # %cond.load381
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 97
+; CHECK-RV64-NEXT:    li a3, 96
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_622
+; CHECK-RV64-NEXT:    j .LBB61_100
+; CHECK-RV64-NEXT:  .LBB61_622: # %cond.load385
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 98
+; CHECK-RV64-NEXT:    li a3, 97
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_623
+; CHECK-RV64-NEXT:    j .LBB61_101
+; CHECK-RV64-NEXT:  .LBB61_623: # %cond.load389
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 99
+; CHECK-RV64-NEXT:    li a3, 98
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_624
+; CHECK-RV64-NEXT:    j .LBB61_102
+; CHECK-RV64-NEXT:  .LBB61_624: # %cond.load393
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 100
+; CHECK-RV64-NEXT:    li a3, 99
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_625
+; CHECK-RV64-NEXT:    j .LBB61_103
+; CHECK-RV64-NEXT:  .LBB61_625: # %cond.load397
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 101
+; CHECK-RV64-NEXT:    li a3, 100
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_626
+; CHECK-RV64-NEXT:    j .LBB61_104
+; CHECK-RV64-NEXT:  .LBB61_626: # %cond.load401
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 102
+; CHECK-RV64-NEXT:    li a3, 101
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_627
+; CHECK-RV64-NEXT:    j .LBB61_105
+; CHECK-RV64-NEXT:  .LBB61_627: # %cond.load405
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 103
+; CHECK-RV64-NEXT:    li a3, 102
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_628
+; CHECK-RV64-NEXT:    j .LBB61_106
+; CHECK-RV64-NEXT:  .LBB61_628: # %cond.load409
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 104
+; CHECK-RV64-NEXT:    li a3, 103
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_629
+; CHECK-RV64-NEXT:    j .LBB61_107
+; CHECK-RV64-NEXT:  .LBB61_629: # %cond.load413
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 105
+; CHECK-RV64-NEXT:    li a3, 104
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_630
+; CHECK-RV64-NEXT:    j .LBB61_108
+; CHECK-RV64-NEXT:  .LBB61_630: # %cond.load417
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 106
+; CHECK-RV64-NEXT:    li a3, 105
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_631
+; CHECK-RV64-NEXT:    j .LBB61_109
+; CHECK-RV64-NEXT:  .LBB61_631: # %cond.load421
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 107
+; CHECK-RV64-NEXT:    li a3, 106
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_632
+; CHECK-RV64-NEXT:    j .LBB61_110
+; CHECK-RV64-NEXT:  .LBB61_632: # %cond.load425
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 108
+; CHECK-RV64-NEXT:    li a3, 107
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_633
+; CHECK-RV64-NEXT:    j .LBB61_111
+; CHECK-RV64-NEXT:  .LBB61_633: # %cond.load429
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 109
+; CHECK-RV64-NEXT:    li a3, 108
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_634
+; CHECK-RV64-NEXT:    j .LBB61_112
+; CHECK-RV64-NEXT:  .LBB61_634: # %cond.load433
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 110
+; CHECK-RV64-NEXT:    li a3, 109
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_635
+; CHECK-RV64-NEXT:    j .LBB61_113
+; CHECK-RV64-NEXT:  .LBB61_635: # %cond.load437
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 111
+; CHECK-RV64-NEXT:    li a3, 110
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_636
+; CHECK-RV64-NEXT:    j .LBB61_114
+; CHECK-RV64-NEXT:  .LBB61_636: # %cond.load441
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 112
+; CHECK-RV64-NEXT:    li a3, 111
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_637
+; CHECK-RV64-NEXT:    j .LBB61_115
+; CHECK-RV64-NEXT:  .LBB61_637: # %cond.load445
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 113
+; CHECK-RV64-NEXT:    li a3, 112
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_638
+; CHECK-RV64-NEXT:    j .LBB61_116
+; CHECK-RV64-NEXT:  .LBB61_638: # %cond.load449
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 114
+; CHECK-RV64-NEXT:    li a3, 113
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_639
+; CHECK-RV64-NEXT:    j .LBB61_117
+; CHECK-RV64-NEXT:  .LBB61_639: # %cond.load453
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 115
+; CHECK-RV64-NEXT:    li a3, 114
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_640
+; CHECK-RV64-NEXT:    j .LBB61_118
+; CHECK-RV64-NEXT:  .LBB61_640: # %cond.load457
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 116
+; CHECK-RV64-NEXT:    li a3, 115
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_641
+; CHECK-RV64-NEXT:    j .LBB61_119
+; CHECK-RV64-NEXT:  .LBB61_641: # %cond.load461
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 117
+; CHECK-RV64-NEXT:    li a3, 116
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_642
+; CHECK-RV64-NEXT:    j .LBB61_120
+; CHECK-RV64-NEXT:  .LBB61_642: # %cond.load465
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 118
+; CHECK-RV64-NEXT:    li a3, 117
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_643
+; CHECK-RV64-NEXT:    j .LBB61_121
+; CHECK-RV64-NEXT:  .LBB61_643: # %cond.load469
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 119
+; CHECK-RV64-NEXT:    li a3, 118
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_644
+; CHECK-RV64-NEXT:    j .LBB61_122
+; CHECK-RV64-NEXT:  .LBB61_644: # %cond.load473
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 120
+; CHECK-RV64-NEXT:    li a3, 119
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_645
+; CHECK-RV64-NEXT:    j .LBB61_123
+; CHECK-RV64-NEXT:  .LBB61_645: # %cond.load477
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 121
+; CHECK-RV64-NEXT:    li a3, 120
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_646
+; CHECK-RV64-NEXT:    j .LBB61_124
+; CHECK-RV64-NEXT:  .LBB61_646: # %cond.load481
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 122
+; CHECK-RV64-NEXT:    li a3, 121
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_647
+; CHECK-RV64-NEXT:    j .LBB61_125
+; CHECK-RV64-NEXT:  .LBB61_647: # %cond.load485
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 123
+; CHECK-RV64-NEXT:    li a3, 122
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_648
+; CHECK-RV64-NEXT:    j .LBB61_126
+; CHECK-RV64-NEXT:  .LBB61_648: # %cond.load489
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 124
+; CHECK-RV64-NEXT:    li a3, 123
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_649
+; CHECK-RV64-NEXT:    j .LBB61_127
+; CHECK-RV64-NEXT:  .LBB61_649: # %cond.load493
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 125
+; CHECK-RV64-NEXT:    li a3, 124
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1026
+; CHECK-RV64-NEXT:    j .LBB61_128
+; CHECK-RV64-NEXT:  .LBB61_1026: # %cond.load493
+; CHECK-RV64-NEXT:    j .LBB61_129
+; CHECK-RV64-NEXT:  .LBB61_650: # %cond.load505
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 128
+; CHECK-RV64-NEXT:    li a3, 127
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_651
+; CHECK-RV64-NEXT:    j .LBB61_133
+; CHECK-RV64-NEXT:  .LBB61_651: # %cond.load509
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 129
+; CHECK-RV64-NEXT:    li a3, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_652
+; CHECK-RV64-NEXT:    j .LBB61_134
+; CHECK-RV64-NEXT:  .LBB61_652: # %cond.load513
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 130
+; CHECK-RV64-NEXT:    li a3, 129
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_653
+; CHECK-RV64-NEXT:    j .LBB61_135
+; CHECK-RV64-NEXT:  .LBB61_653: # %cond.load517
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 131
+; CHECK-RV64-NEXT:    li a3, 130
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_654
+; CHECK-RV64-NEXT:    j .LBB61_136
+; CHECK-RV64-NEXT:  .LBB61_654: # %cond.load521
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 132
+; CHECK-RV64-NEXT:    li a3, 131
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_655
+; CHECK-RV64-NEXT:    j .LBB61_137
+; CHECK-RV64-NEXT:  .LBB61_655: # %cond.load525
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 133
+; CHECK-RV64-NEXT:    li a3, 132
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_656
+; CHECK-RV64-NEXT:    j .LBB61_138
+; CHECK-RV64-NEXT:  .LBB61_656: # %cond.load529
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 134
+; CHECK-RV64-NEXT:    li a3, 133
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_657
+; CHECK-RV64-NEXT:    j .LBB61_139
+; CHECK-RV64-NEXT:  .LBB61_657: # %cond.load533
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 135
+; CHECK-RV64-NEXT:    li a3, 134
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_658
+; CHECK-RV64-NEXT:    j .LBB61_140
+; CHECK-RV64-NEXT:  .LBB61_658: # %cond.load537
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 136
+; CHECK-RV64-NEXT:    li a3, 135
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_659
+; CHECK-RV64-NEXT:    j .LBB61_141
+; CHECK-RV64-NEXT:  .LBB61_659: # %cond.load541
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 137
+; CHECK-RV64-NEXT:    li a3, 136
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_660
+; CHECK-RV64-NEXT:    j .LBB61_142
+; CHECK-RV64-NEXT:  .LBB61_660: # %cond.load545
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 138
+; CHECK-RV64-NEXT:    li a3, 137
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_661
+; CHECK-RV64-NEXT:    j .LBB61_143
+; CHECK-RV64-NEXT:  .LBB61_661: # %cond.load549
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 139
+; CHECK-RV64-NEXT:    li a3, 138
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_662
+; CHECK-RV64-NEXT:    j .LBB61_144
+; CHECK-RV64-NEXT:  .LBB61_662: # %cond.load553
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 140
+; CHECK-RV64-NEXT:    li a3, 139
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_663
+; CHECK-RV64-NEXT:    j .LBB61_145
+; CHECK-RV64-NEXT:  .LBB61_663: # %cond.load557
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 141
+; CHECK-RV64-NEXT:    li a3, 140
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_664
+; CHECK-RV64-NEXT:    j .LBB61_146
+; CHECK-RV64-NEXT:  .LBB61_664: # %cond.load561
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 142
+; CHECK-RV64-NEXT:    li a3, 141
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_665
+; CHECK-RV64-NEXT:    j .LBB61_147
+; CHECK-RV64-NEXT:  .LBB61_665: # %cond.load565
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 143
+; CHECK-RV64-NEXT:    li a3, 142
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_666
+; CHECK-RV64-NEXT:    j .LBB61_148
+; CHECK-RV64-NEXT:  .LBB61_666: # %cond.load569
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 144
+; CHECK-RV64-NEXT:    li a3, 143
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_667
+; CHECK-RV64-NEXT:    j .LBB61_149
+; CHECK-RV64-NEXT:  .LBB61_667: # %cond.load573
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 145
+; CHECK-RV64-NEXT:    li a3, 144
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_668
+; CHECK-RV64-NEXT:    j .LBB61_150
+; CHECK-RV64-NEXT:  .LBB61_668: # %cond.load577
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 146
+; CHECK-RV64-NEXT:    li a3, 145
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_669
+; CHECK-RV64-NEXT:    j .LBB61_151
+; CHECK-RV64-NEXT:  .LBB61_669: # %cond.load581
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 147
+; CHECK-RV64-NEXT:    li a3, 146
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_670
+; CHECK-RV64-NEXT:    j .LBB61_152
+; CHECK-RV64-NEXT:  .LBB61_670: # %cond.load585
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 148
+; CHECK-RV64-NEXT:    li a3, 147
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_671
+; CHECK-RV64-NEXT:    j .LBB61_153
+; CHECK-RV64-NEXT:  .LBB61_671: # %cond.load589
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 149
+; CHECK-RV64-NEXT:    li a3, 148
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_672
+; CHECK-RV64-NEXT:    j .LBB61_154
+; CHECK-RV64-NEXT:  .LBB61_672: # %cond.load593
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 150
+; CHECK-RV64-NEXT:    li a3, 149
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_673
+; CHECK-RV64-NEXT:    j .LBB61_155
+; CHECK-RV64-NEXT:  .LBB61_673: # %cond.load597
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 151
+; CHECK-RV64-NEXT:    li a3, 150
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_674
+; CHECK-RV64-NEXT:    j .LBB61_156
+; CHECK-RV64-NEXT:  .LBB61_674: # %cond.load601
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 152
+; CHECK-RV64-NEXT:    li a3, 151
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_675
+; CHECK-RV64-NEXT:    j .LBB61_157
+; CHECK-RV64-NEXT:  .LBB61_675: # %cond.load605
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 153
+; CHECK-RV64-NEXT:    li a3, 152
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_676
+; CHECK-RV64-NEXT:    j .LBB61_158
+; CHECK-RV64-NEXT:  .LBB61_676: # %cond.load609
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 154
+; CHECK-RV64-NEXT:    li a3, 153
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_677
+; CHECK-RV64-NEXT:    j .LBB61_159
+; CHECK-RV64-NEXT:  .LBB61_677: # %cond.load613
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 155
+; CHECK-RV64-NEXT:    li a3, 154
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_678
+; CHECK-RV64-NEXT:    j .LBB61_160
+; CHECK-RV64-NEXT:  .LBB61_678: # %cond.load617
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 156
+; CHECK-RV64-NEXT:    li a3, 155
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_679
+; CHECK-RV64-NEXT:    j .LBB61_161
+; CHECK-RV64-NEXT:  .LBB61_679: # %cond.load621
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 157
+; CHECK-RV64-NEXT:    li a3, 156
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_680
+; CHECK-RV64-NEXT:    j .LBB61_162
+; CHECK-RV64-NEXT:  .LBB61_680: # %cond.load625
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 158
+; CHECK-RV64-NEXT:    li a3, 157
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_681
+; CHECK-RV64-NEXT:    j .LBB61_163
+; CHECK-RV64-NEXT:  .LBB61_681: # %cond.load629
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 159
+; CHECK-RV64-NEXT:    li a3, 158
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_682
+; CHECK-RV64-NEXT:    j .LBB61_164
+; CHECK-RV64-NEXT:  .LBB61_682: # %cond.load633
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 160
+; CHECK-RV64-NEXT:    li a3, 159
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_683
+; CHECK-RV64-NEXT:    j .LBB61_165
+; CHECK-RV64-NEXT:  .LBB61_683: # %cond.load637
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 161
+; CHECK-RV64-NEXT:    li a3, 160
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_684
+; CHECK-RV64-NEXT:    j .LBB61_166
+; CHECK-RV64-NEXT:  .LBB61_684: # %cond.load641
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 162
+; CHECK-RV64-NEXT:    li a3, 161
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_685
+; CHECK-RV64-NEXT:    j .LBB61_167
+; CHECK-RV64-NEXT:  .LBB61_685: # %cond.load645
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 163
+; CHECK-RV64-NEXT:    li a3, 162
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_686
+; CHECK-RV64-NEXT:    j .LBB61_168
+; CHECK-RV64-NEXT:  .LBB61_686: # %cond.load649
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 164
+; CHECK-RV64-NEXT:    li a3, 163
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_687
+; CHECK-RV64-NEXT:    j .LBB61_169
+; CHECK-RV64-NEXT:  .LBB61_687: # %cond.load653
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 165
+; CHECK-RV64-NEXT:    li a3, 164
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_688
+; CHECK-RV64-NEXT:    j .LBB61_170
+; CHECK-RV64-NEXT:  .LBB61_688: # %cond.load657
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 166
+; CHECK-RV64-NEXT:    li a3, 165
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_689
+; CHECK-RV64-NEXT:    j .LBB61_171
+; CHECK-RV64-NEXT:  .LBB61_689: # %cond.load661
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 167
+; CHECK-RV64-NEXT:    li a3, 166
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_690
+; CHECK-RV64-NEXT:    j .LBB61_172
+; CHECK-RV64-NEXT:  .LBB61_690: # %cond.load665
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 168
+; CHECK-RV64-NEXT:    li a3, 167
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_691
+; CHECK-RV64-NEXT:    j .LBB61_173
+; CHECK-RV64-NEXT:  .LBB61_691: # %cond.load669
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 169
+; CHECK-RV64-NEXT:    li a3, 168
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_692
+; CHECK-RV64-NEXT:    j .LBB61_174
+; CHECK-RV64-NEXT:  .LBB61_692: # %cond.load673
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 170
+; CHECK-RV64-NEXT:    li a3, 169
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_693
+; CHECK-RV64-NEXT:    j .LBB61_175
+; CHECK-RV64-NEXT:  .LBB61_693: # %cond.load677
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 171
+; CHECK-RV64-NEXT:    li a3, 170
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_694
+; CHECK-RV64-NEXT:    j .LBB61_176
+; CHECK-RV64-NEXT:  .LBB61_694: # %cond.load681
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 172
+; CHECK-RV64-NEXT:    li a3, 171
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_695
+; CHECK-RV64-NEXT:    j .LBB61_177
+; CHECK-RV64-NEXT:  .LBB61_695: # %cond.load685
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 173
+; CHECK-RV64-NEXT:    li a3, 172
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_696
+; CHECK-RV64-NEXT:    j .LBB61_178
+; CHECK-RV64-NEXT:  .LBB61_696: # %cond.load689
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 174
+; CHECK-RV64-NEXT:    li a3, 173
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_697
+; CHECK-RV64-NEXT:    j .LBB61_179
+; CHECK-RV64-NEXT:  .LBB61_697: # %cond.load693
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 175
+; CHECK-RV64-NEXT:    li a3, 174
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_698
+; CHECK-RV64-NEXT:    j .LBB61_180
+; CHECK-RV64-NEXT:  .LBB61_698: # %cond.load697
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 176
+; CHECK-RV64-NEXT:    li a3, 175
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_699
+; CHECK-RV64-NEXT:    j .LBB61_181
+; CHECK-RV64-NEXT:  .LBB61_699: # %cond.load701
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 177
+; CHECK-RV64-NEXT:    li a3, 176
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_700
+; CHECK-RV64-NEXT:    j .LBB61_182
+; CHECK-RV64-NEXT:  .LBB61_700: # %cond.load705
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 178
+; CHECK-RV64-NEXT:    li a3, 177
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_701
+; CHECK-RV64-NEXT:    j .LBB61_183
+; CHECK-RV64-NEXT:  .LBB61_701: # %cond.load709
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 179
+; CHECK-RV64-NEXT:    li a3, 178
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_702
+; CHECK-RV64-NEXT:    j .LBB61_184
+; CHECK-RV64-NEXT:  .LBB61_702: # %cond.load713
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 180
+; CHECK-RV64-NEXT:    li a3, 179
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_703
+; CHECK-RV64-NEXT:    j .LBB61_185
+; CHECK-RV64-NEXT:  .LBB61_703: # %cond.load717
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 181
+; CHECK-RV64-NEXT:    li a3, 180
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_704
+; CHECK-RV64-NEXT:    j .LBB61_186
+; CHECK-RV64-NEXT:  .LBB61_704: # %cond.load721
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 182
+; CHECK-RV64-NEXT:    li a3, 181
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_705
+; CHECK-RV64-NEXT:    j .LBB61_187
+; CHECK-RV64-NEXT:  .LBB61_705: # %cond.load725
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 183
+; CHECK-RV64-NEXT:    li a3, 182
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_706
+; CHECK-RV64-NEXT:    j .LBB61_188
+; CHECK-RV64-NEXT:  .LBB61_706: # %cond.load729
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 184
+; CHECK-RV64-NEXT:    li a3, 183
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_707
+; CHECK-RV64-NEXT:    j .LBB61_189
+; CHECK-RV64-NEXT:  .LBB61_707: # %cond.load733
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 185
+; CHECK-RV64-NEXT:    li a3, 184
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_708
+; CHECK-RV64-NEXT:    j .LBB61_190
+; CHECK-RV64-NEXT:  .LBB61_708: # %cond.load737
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 186
+; CHECK-RV64-NEXT:    li a3, 185
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_709
+; CHECK-RV64-NEXT:    j .LBB61_191
+; CHECK-RV64-NEXT:  .LBB61_709: # %cond.load741
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 187
+; CHECK-RV64-NEXT:    li a3, 186
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_710
+; CHECK-RV64-NEXT:    j .LBB61_192
+; CHECK-RV64-NEXT:  .LBB61_710: # %cond.load745
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 188
+; CHECK-RV64-NEXT:    li a3, 187
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_711
+; CHECK-RV64-NEXT:    j .LBB61_193
+; CHECK-RV64-NEXT:  .LBB61_711: # %cond.load749
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 189
+; CHECK-RV64-NEXT:    li a3, 188
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1027
+; CHECK-RV64-NEXT:    j .LBB61_194
+; CHECK-RV64-NEXT:  .LBB61_1027: # %cond.load749
+; CHECK-RV64-NEXT:    j .LBB61_195
+; CHECK-RV64-NEXT:  .LBB61_712: # %cond.load761
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 192
+; CHECK-RV64-NEXT:    li a3, 191
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_713
+; CHECK-RV64-NEXT:    j .LBB61_199
+; CHECK-RV64-NEXT:  .LBB61_713: # %cond.load765
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 193
+; CHECK-RV64-NEXT:    li a3, 192
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_714
+; CHECK-RV64-NEXT:    j .LBB61_200
+; CHECK-RV64-NEXT:  .LBB61_714: # %cond.load769
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 194
+; CHECK-RV64-NEXT:    li a3, 193
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_715
+; CHECK-RV64-NEXT:    j .LBB61_201
+; CHECK-RV64-NEXT:  .LBB61_715: # %cond.load773
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 195
+; CHECK-RV64-NEXT:    li a3, 194
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_716
+; CHECK-RV64-NEXT:    j .LBB61_202
+; CHECK-RV64-NEXT:  .LBB61_716: # %cond.load777
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 196
+; CHECK-RV64-NEXT:    li a3, 195
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_717
+; CHECK-RV64-NEXT:    j .LBB61_203
+; CHECK-RV64-NEXT:  .LBB61_717: # %cond.load781
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 197
+; CHECK-RV64-NEXT:    li a3, 196
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_718
+; CHECK-RV64-NEXT:    j .LBB61_204
+; CHECK-RV64-NEXT:  .LBB61_718: # %cond.load785
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 198
+; CHECK-RV64-NEXT:    li a3, 197
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_719
+; CHECK-RV64-NEXT:    j .LBB61_205
+; CHECK-RV64-NEXT:  .LBB61_719: # %cond.load789
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 199
+; CHECK-RV64-NEXT:    li a3, 198
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_720
+; CHECK-RV64-NEXT:    j .LBB61_206
+; CHECK-RV64-NEXT:  .LBB61_720: # %cond.load793
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 200
+; CHECK-RV64-NEXT:    li a3, 199
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_721
+; CHECK-RV64-NEXT:    j .LBB61_207
+; CHECK-RV64-NEXT:  .LBB61_721: # %cond.load797
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 201
+; CHECK-RV64-NEXT:    li a3, 200
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_722
+; CHECK-RV64-NEXT:    j .LBB61_208
+; CHECK-RV64-NEXT:  .LBB61_722: # %cond.load801
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 202
+; CHECK-RV64-NEXT:    li a3, 201
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_723
+; CHECK-RV64-NEXT:    j .LBB61_209
+; CHECK-RV64-NEXT:  .LBB61_723: # %cond.load805
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 203
+; CHECK-RV64-NEXT:    li a3, 202
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_724
+; CHECK-RV64-NEXT:    j .LBB61_210
+; CHECK-RV64-NEXT:  .LBB61_724: # %cond.load809
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 204
+; CHECK-RV64-NEXT:    li a3, 203
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_725
+; CHECK-RV64-NEXT:    j .LBB61_211
+; CHECK-RV64-NEXT:  .LBB61_725: # %cond.load813
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 205
+; CHECK-RV64-NEXT:    li a3, 204
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_726
+; CHECK-RV64-NEXT:    j .LBB61_212
+; CHECK-RV64-NEXT:  .LBB61_726: # %cond.load817
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 206
+; CHECK-RV64-NEXT:    li a3, 205
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_727
+; CHECK-RV64-NEXT:    j .LBB61_213
+; CHECK-RV64-NEXT:  .LBB61_727: # %cond.load821
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 207
+; CHECK-RV64-NEXT:    li a3, 206
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_728
+; CHECK-RV64-NEXT:    j .LBB61_214
+; CHECK-RV64-NEXT:  .LBB61_728: # %cond.load825
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 208
+; CHECK-RV64-NEXT:    li a3, 207
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_729
+; CHECK-RV64-NEXT:    j .LBB61_215
+; CHECK-RV64-NEXT:  .LBB61_729: # %cond.load829
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 209
+; CHECK-RV64-NEXT:    li a3, 208
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_730
+; CHECK-RV64-NEXT:    j .LBB61_216
+; CHECK-RV64-NEXT:  .LBB61_730: # %cond.load833
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 210
+; CHECK-RV64-NEXT:    li a3, 209
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_731
+; CHECK-RV64-NEXT:    j .LBB61_217
+; CHECK-RV64-NEXT:  .LBB61_731: # %cond.load837
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 211
+; CHECK-RV64-NEXT:    li a3, 210
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_732
+; CHECK-RV64-NEXT:    j .LBB61_218
+; CHECK-RV64-NEXT:  .LBB61_732: # %cond.load841
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 212
+; CHECK-RV64-NEXT:    li a3, 211
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_733
+; CHECK-RV64-NEXT:    j .LBB61_219
+; CHECK-RV64-NEXT:  .LBB61_733: # %cond.load845
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 213
+; CHECK-RV64-NEXT:    li a3, 212
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_734
+; CHECK-RV64-NEXT:    j .LBB61_220
+; CHECK-RV64-NEXT:  .LBB61_734: # %cond.load849
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 214
+; CHECK-RV64-NEXT:    li a3, 213
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_735
+; CHECK-RV64-NEXT:    j .LBB61_221
+; CHECK-RV64-NEXT:  .LBB61_735: # %cond.load853
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 215
+; CHECK-RV64-NEXT:    li a3, 214
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_736
+; CHECK-RV64-NEXT:    j .LBB61_222
+; CHECK-RV64-NEXT:  .LBB61_736: # %cond.load857
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 216
+; CHECK-RV64-NEXT:    li a3, 215
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_737
+; CHECK-RV64-NEXT:    j .LBB61_223
+; CHECK-RV64-NEXT:  .LBB61_737: # %cond.load861
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 217
+; CHECK-RV64-NEXT:    li a3, 216
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_738
+; CHECK-RV64-NEXT:    j .LBB61_224
+; CHECK-RV64-NEXT:  .LBB61_738: # %cond.load865
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 218
+; CHECK-RV64-NEXT:    li a3, 217
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_739
+; CHECK-RV64-NEXT:    j .LBB61_225
+; CHECK-RV64-NEXT:  .LBB61_739: # %cond.load869
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 219
+; CHECK-RV64-NEXT:    li a3, 218
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_740
+; CHECK-RV64-NEXT:    j .LBB61_226
+; CHECK-RV64-NEXT:  .LBB61_740: # %cond.load873
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 220
+; CHECK-RV64-NEXT:    li a3, 219
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_741
+; CHECK-RV64-NEXT:    j .LBB61_227
+; CHECK-RV64-NEXT:  .LBB61_741: # %cond.load877
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 221
+; CHECK-RV64-NEXT:    li a3, 220
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_742
+; CHECK-RV64-NEXT:    j .LBB61_228
+; CHECK-RV64-NEXT:  .LBB61_742: # %cond.load881
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 222
+; CHECK-RV64-NEXT:    li a3, 221
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_743
+; CHECK-RV64-NEXT:    j .LBB61_229
+; CHECK-RV64-NEXT:  .LBB61_743: # %cond.load885
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 223
+; CHECK-RV64-NEXT:    li a3, 222
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_744
+; CHECK-RV64-NEXT:    j .LBB61_230
+; CHECK-RV64-NEXT:  .LBB61_744: # %cond.load889
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 224
+; CHECK-RV64-NEXT:    li a3, 223
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_745
+; CHECK-RV64-NEXT:    j .LBB61_231
+; CHECK-RV64-NEXT:  .LBB61_745: # %cond.load893
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 225
+; CHECK-RV64-NEXT:    li a3, 224
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_746
+; CHECK-RV64-NEXT:    j .LBB61_232
+; CHECK-RV64-NEXT:  .LBB61_746: # %cond.load897
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 226
+; CHECK-RV64-NEXT:    li a3, 225
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_747
+; CHECK-RV64-NEXT:    j .LBB61_233
+; CHECK-RV64-NEXT:  .LBB61_747: # %cond.load901
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 227
+; CHECK-RV64-NEXT:    li a3, 226
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_748
+; CHECK-RV64-NEXT:    j .LBB61_234
+; CHECK-RV64-NEXT:  .LBB61_748: # %cond.load905
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 228
+; CHECK-RV64-NEXT:    li a3, 227
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_749
+; CHECK-RV64-NEXT:    j .LBB61_235
+; CHECK-RV64-NEXT:  .LBB61_749: # %cond.load909
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 229
+; CHECK-RV64-NEXT:    li a3, 228
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_750
+; CHECK-RV64-NEXT:    j .LBB61_236
+; CHECK-RV64-NEXT:  .LBB61_750: # %cond.load913
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 230
+; CHECK-RV64-NEXT:    li a3, 229
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_751
+; CHECK-RV64-NEXT:    j .LBB61_237
+; CHECK-RV64-NEXT:  .LBB61_751: # %cond.load917
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 231
+; CHECK-RV64-NEXT:    li a3, 230
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_752
+; CHECK-RV64-NEXT:    j .LBB61_238
+; CHECK-RV64-NEXT:  .LBB61_752: # %cond.load921
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 232
+; CHECK-RV64-NEXT:    li a3, 231
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_753
+; CHECK-RV64-NEXT:    j .LBB61_239
+; CHECK-RV64-NEXT:  .LBB61_753: # %cond.load925
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 233
+; CHECK-RV64-NEXT:    li a3, 232
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_754
+; CHECK-RV64-NEXT:    j .LBB61_240
+; CHECK-RV64-NEXT:  .LBB61_754: # %cond.load929
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 234
+; CHECK-RV64-NEXT:    li a3, 233
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_755
+; CHECK-RV64-NEXT:    j .LBB61_241
+; CHECK-RV64-NEXT:  .LBB61_755: # %cond.load933
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 235
+; CHECK-RV64-NEXT:    li a3, 234
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_756
+; CHECK-RV64-NEXT:    j .LBB61_242
+; CHECK-RV64-NEXT:  .LBB61_756: # %cond.load937
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 236
+; CHECK-RV64-NEXT:    li a3, 235
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_757
+; CHECK-RV64-NEXT:    j .LBB61_243
+; CHECK-RV64-NEXT:  .LBB61_757: # %cond.load941
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 237
+; CHECK-RV64-NEXT:    li a3, 236
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_758
+; CHECK-RV64-NEXT:    j .LBB61_244
+; CHECK-RV64-NEXT:  .LBB61_758: # %cond.load945
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 238
+; CHECK-RV64-NEXT:    li a3, 237
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_759
+; CHECK-RV64-NEXT:    j .LBB61_245
+; CHECK-RV64-NEXT:  .LBB61_759: # %cond.load949
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 239
+; CHECK-RV64-NEXT:    li a3, 238
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_760
+; CHECK-RV64-NEXT:    j .LBB61_246
+; CHECK-RV64-NEXT:  .LBB61_760: # %cond.load953
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 240
+; CHECK-RV64-NEXT:    li a3, 239
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_761
+; CHECK-RV64-NEXT:    j .LBB61_247
+; CHECK-RV64-NEXT:  .LBB61_761: # %cond.load957
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 241
+; CHECK-RV64-NEXT:    li a3, 240
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_762
+; CHECK-RV64-NEXT:    j .LBB61_248
+; CHECK-RV64-NEXT:  .LBB61_762: # %cond.load961
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 242
+; CHECK-RV64-NEXT:    li a3, 241
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_763
+; CHECK-RV64-NEXT:    j .LBB61_249
+; CHECK-RV64-NEXT:  .LBB61_763: # %cond.load965
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 243
+; CHECK-RV64-NEXT:    li a3, 242
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_764
+; CHECK-RV64-NEXT:    j .LBB61_250
+; CHECK-RV64-NEXT:  .LBB61_764: # %cond.load969
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 244
+; CHECK-RV64-NEXT:    li a3, 243
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_765
+; CHECK-RV64-NEXT:    j .LBB61_251
+; CHECK-RV64-NEXT:  .LBB61_765: # %cond.load973
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 245
+; CHECK-RV64-NEXT:    li a3, 244
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_766
+; CHECK-RV64-NEXT:    j .LBB61_252
+; CHECK-RV64-NEXT:  .LBB61_766: # %cond.load977
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 246
+; CHECK-RV64-NEXT:    li a3, 245
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_767
+; CHECK-RV64-NEXT:    j .LBB61_253
+; CHECK-RV64-NEXT:  .LBB61_767: # %cond.load981
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 247
+; CHECK-RV64-NEXT:    li a3, 246
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_768
+; CHECK-RV64-NEXT:    j .LBB61_254
+; CHECK-RV64-NEXT:  .LBB61_768: # %cond.load985
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 248
+; CHECK-RV64-NEXT:    li a3, 247
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_769
+; CHECK-RV64-NEXT:    j .LBB61_255
+; CHECK-RV64-NEXT:  .LBB61_769: # %cond.load989
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 249
+; CHECK-RV64-NEXT:    li a3, 248
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_770
+; CHECK-RV64-NEXT:    j .LBB61_256
+; CHECK-RV64-NEXT:  .LBB61_770: # %cond.load993
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 250
+; CHECK-RV64-NEXT:    li a3, 249
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_771
+; CHECK-RV64-NEXT:    j .LBB61_257
+; CHECK-RV64-NEXT:  .LBB61_771: # %cond.load997
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 251
+; CHECK-RV64-NEXT:    li a3, 250
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_772
+; CHECK-RV64-NEXT:    j .LBB61_258
+; CHECK-RV64-NEXT:  .LBB61_772: # %cond.load1001
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 252
+; CHECK-RV64-NEXT:    li a3, 251
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_773
+; CHECK-RV64-NEXT:    j .LBB61_259
+; CHECK-RV64-NEXT:  .LBB61_773: # %cond.load1005
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 253
+; CHECK-RV64-NEXT:    li a3, 252
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1028
+; CHECK-RV64-NEXT:    j .LBB61_260
+; CHECK-RV64-NEXT:  .LBB61_1028: # %cond.load1005
+; CHECK-RV64-NEXT:    j .LBB61_261
+; CHECK-RV64-NEXT:  .LBB61_774: # %cond.load1017
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 256
+; CHECK-RV64-NEXT:    li a3, 255
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_775
+; CHECK-RV64-NEXT:    j .LBB61_265
+; CHECK-RV64-NEXT:  .LBB61_775: # %cond.load1021
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 257
+; CHECK-RV64-NEXT:    li a3, 256
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_776
+; CHECK-RV64-NEXT:    j .LBB61_266
+; CHECK-RV64-NEXT:  .LBB61_776: # %cond.load1025
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 258
+; CHECK-RV64-NEXT:    li a3, 257
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_777
+; CHECK-RV64-NEXT:    j .LBB61_267
+; CHECK-RV64-NEXT:  .LBB61_777: # %cond.load1029
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 259
+; CHECK-RV64-NEXT:    li a3, 258
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_778
+; CHECK-RV64-NEXT:    j .LBB61_268
+; CHECK-RV64-NEXT:  .LBB61_778: # %cond.load1033
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 260
+; CHECK-RV64-NEXT:    li a3, 259
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_779
+; CHECK-RV64-NEXT:    j .LBB61_269
+; CHECK-RV64-NEXT:  .LBB61_779: # %cond.load1037
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 261
+; CHECK-RV64-NEXT:    li a3, 260
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_780
+; CHECK-RV64-NEXT:    j .LBB61_270
+; CHECK-RV64-NEXT:  .LBB61_780: # %cond.load1041
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 262
+; CHECK-RV64-NEXT:    li a3, 261
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_781
+; CHECK-RV64-NEXT:    j .LBB61_271
+; CHECK-RV64-NEXT:  .LBB61_781: # %cond.load1045
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 263
+; CHECK-RV64-NEXT:    li a3, 262
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_782
+; CHECK-RV64-NEXT:    j .LBB61_272
+; CHECK-RV64-NEXT:  .LBB61_782: # %cond.load1049
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 264
+; CHECK-RV64-NEXT:    li a3, 263
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_783
+; CHECK-RV64-NEXT:    j .LBB61_273
+; CHECK-RV64-NEXT:  .LBB61_783: # %cond.load1053
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 265
+; CHECK-RV64-NEXT:    li a3, 264
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_784
+; CHECK-RV64-NEXT:    j .LBB61_274
+; CHECK-RV64-NEXT:  .LBB61_784: # %cond.load1057
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 266
+; CHECK-RV64-NEXT:    li a3, 265
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_785
+; CHECK-RV64-NEXT:    j .LBB61_275
+; CHECK-RV64-NEXT:  .LBB61_785: # %cond.load1061
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 267
+; CHECK-RV64-NEXT:    li a3, 266
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_786
+; CHECK-RV64-NEXT:    j .LBB61_276
+; CHECK-RV64-NEXT:  .LBB61_786: # %cond.load1065
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 268
+; CHECK-RV64-NEXT:    li a3, 267
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_787
+; CHECK-RV64-NEXT:    j .LBB61_277
+; CHECK-RV64-NEXT:  .LBB61_787: # %cond.load1069
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 269
+; CHECK-RV64-NEXT:    li a3, 268
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_788
+; CHECK-RV64-NEXT:    j .LBB61_278
+; CHECK-RV64-NEXT:  .LBB61_788: # %cond.load1073
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 270
+; CHECK-RV64-NEXT:    li a3, 269
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_789
+; CHECK-RV64-NEXT:    j .LBB61_279
+; CHECK-RV64-NEXT:  .LBB61_789: # %cond.load1077
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 271
+; CHECK-RV64-NEXT:    li a3, 270
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_790
+; CHECK-RV64-NEXT:    j .LBB61_280
+; CHECK-RV64-NEXT:  .LBB61_790: # %cond.load1081
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 272
+; CHECK-RV64-NEXT:    li a3, 271
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_791
+; CHECK-RV64-NEXT:    j .LBB61_281
+; CHECK-RV64-NEXT:  .LBB61_791: # %cond.load1085
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 273
+; CHECK-RV64-NEXT:    li a3, 272
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_792
+; CHECK-RV64-NEXT:    j .LBB61_282
+; CHECK-RV64-NEXT:  .LBB61_792: # %cond.load1089
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 274
+; CHECK-RV64-NEXT:    li a3, 273
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_793
+; CHECK-RV64-NEXT:    j .LBB61_283
+; CHECK-RV64-NEXT:  .LBB61_793: # %cond.load1093
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 275
+; CHECK-RV64-NEXT:    li a3, 274
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_794
+; CHECK-RV64-NEXT:    j .LBB61_284
+; CHECK-RV64-NEXT:  .LBB61_794: # %cond.load1097
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 276
+; CHECK-RV64-NEXT:    li a3, 275
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_795
+; CHECK-RV64-NEXT:    j .LBB61_285
+; CHECK-RV64-NEXT:  .LBB61_795: # %cond.load1101
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 277
+; CHECK-RV64-NEXT:    li a3, 276
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_796
+; CHECK-RV64-NEXT:    j .LBB61_286
+; CHECK-RV64-NEXT:  .LBB61_796: # %cond.load1105
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 278
+; CHECK-RV64-NEXT:    li a3, 277
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_797
+; CHECK-RV64-NEXT:    j .LBB61_287
+; CHECK-RV64-NEXT:  .LBB61_797: # %cond.load1109
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 279
+; CHECK-RV64-NEXT:    li a3, 278
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_798
+; CHECK-RV64-NEXT:    j .LBB61_288
+; CHECK-RV64-NEXT:  .LBB61_798: # %cond.load1113
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 280
+; CHECK-RV64-NEXT:    li a3, 279
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_799
+; CHECK-RV64-NEXT:    j .LBB61_289
+; CHECK-RV64-NEXT:  .LBB61_799: # %cond.load1117
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 281
+; CHECK-RV64-NEXT:    li a3, 280
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_800
+; CHECK-RV64-NEXT:    j .LBB61_290
+; CHECK-RV64-NEXT:  .LBB61_800: # %cond.load1121
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 282
+; CHECK-RV64-NEXT:    li a3, 281
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_801
+; CHECK-RV64-NEXT:    j .LBB61_291
+; CHECK-RV64-NEXT:  .LBB61_801: # %cond.load1125
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 283
+; CHECK-RV64-NEXT:    li a3, 282
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_802
+; CHECK-RV64-NEXT:    j .LBB61_292
+; CHECK-RV64-NEXT:  .LBB61_802: # %cond.load1129
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 284
+; CHECK-RV64-NEXT:    li a3, 283
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_803
+; CHECK-RV64-NEXT:    j .LBB61_293
+; CHECK-RV64-NEXT:  .LBB61_803: # %cond.load1133
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 285
+; CHECK-RV64-NEXT:    li a3, 284
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_804
+; CHECK-RV64-NEXT:    j .LBB61_294
+; CHECK-RV64-NEXT:  .LBB61_804: # %cond.load1137
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 286
+; CHECK-RV64-NEXT:    li a3, 285
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_805
+; CHECK-RV64-NEXT:    j .LBB61_295
+; CHECK-RV64-NEXT:  .LBB61_805: # %cond.load1141
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 287
+; CHECK-RV64-NEXT:    li a3, 286
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_806
+; CHECK-RV64-NEXT:    j .LBB61_296
+; CHECK-RV64-NEXT:  .LBB61_806: # %cond.load1145
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 288
+; CHECK-RV64-NEXT:    li a3, 287
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_807
+; CHECK-RV64-NEXT:    j .LBB61_297
+; CHECK-RV64-NEXT:  .LBB61_807: # %cond.load1149
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 289
+; CHECK-RV64-NEXT:    li a3, 288
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_808
+; CHECK-RV64-NEXT:    j .LBB61_298
+; CHECK-RV64-NEXT:  .LBB61_808: # %cond.load1153
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 290
+; CHECK-RV64-NEXT:    li a3, 289
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_809
+; CHECK-RV64-NEXT:    j .LBB61_299
+; CHECK-RV64-NEXT:  .LBB61_809: # %cond.load1157
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 291
+; CHECK-RV64-NEXT:    li a3, 290
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_810
+; CHECK-RV64-NEXT:    j .LBB61_300
+; CHECK-RV64-NEXT:  .LBB61_810: # %cond.load1161
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 292
+; CHECK-RV64-NEXT:    li a3, 291
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_811
+; CHECK-RV64-NEXT:    j .LBB61_301
+; CHECK-RV64-NEXT:  .LBB61_811: # %cond.load1165
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 293
+; CHECK-RV64-NEXT:    li a3, 292
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_812
+; CHECK-RV64-NEXT:    j .LBB61_302
+; CHECK-RV64-NEXT:  .LBB61_812: # %cond.load1169
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 294
+; CHECK-RV64-NEXT:    li a3, 293
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_813
+; CHECK-RV64-NEXT:    j .LBB61_303
+; CHECK-RV64-NEXT:  .LBB61_813: # %cond.load1173
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 295
+; CHECK-RV64-NEXT:    li a3, 294
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_814
+; CHECK-RV64-NEXT:    j .LBB61_304
+; CHECK-RV64-NEXT:  .LBB61_814: # %cond.load1177
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 296
+; CHECK-RV64-NEXT:    li a3, 295
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_815
+; CHECK-RV64-NEXT:    j .LBB61_305
+; CHECK-RV64-NEXT:  .LBB61_815: # %cond.load1181
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 297
+; CHECK-RV64-NEXT:    li a3, 296
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_816
+; CHECK-RV64-NEXT:    j .LBB61_306
+; CHECK-RV64-NEXT:  .LBB61_816: # %cond.load1185
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 298
+; CHECK-RV64-NEXT:    li a3, 297
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_817
+; CHECK-RV64-NEXT:    j .LBB61_307
+; CHECK-RV64-NEXT:  .LBB61_817: # %cond.load1189
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 299
+; CHECK-RV64-NEXT:    li a3, 298
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_818
+; CHECK-RV64-NEXT:    j .LBB61_308
+; CHECK-RV64-NEXT:  .LBB61_818: # %cond.load1193
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 300
+; CHECK-RV64-NEXT:    li a3, 299
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_819
+; CHECK-RV64-NEXT:    j .LBB61_309
+; CHECK-RV64-NEXT:  .LBB61_819: # %cond.load1197
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 301
+; CHECK-RV64-NEXT:    li a3, 300
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_820
+; CHECK-RV64-NEXT:    j .LBB61_310
+; CHECK-RV64-NEXT:  .LBB61_820: # %cond.load1201
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 302
+; CHECK-RV64-NEXT:    li a3, 301
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_821
+; CHECK-RV64-NEXT:    j .LBB61_311
+; CHECK-RV64-NEXT:  .LBB61_821: # %cond.load1205
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 303
+; CHECK-RV64-NEXT:    li a3, 302
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_822
+; CHECK-RV64-NEXT:    j .LBB61_312
+; CHECK-RV64-NEXT:  .LBB61_822: # %cond.load1209
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 304
+; CHECK-RV64-NEXT:    li a3, 303
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_823
+; CHECK-RV64-NEXT:    j .LBB61_313
+; CHECK-RV64-NEXT:  .LBB61_823: # %cond.load1213
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 305
+; CHECK-RV64-NEXT:    li a3, 304
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_824
+; CHECK-RV64-NEXT:    j .LBB61_314
+; CHECK-RV64-NEXT:  .LBB61_824: # %cond.load1217
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 306
+; CHECK-RV64-NEXT:    li a3, 305
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_825
+; CHECK-RV64-NEXT:    j .LBB61_315
+; CHECK-RV64-NEXT:  .LBB61_825: # %cond.load1221
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 307
+; CHECK-RV64-NEXT:    li a3, 306
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_826
+; CHECK-RV64-NEXT:    j .LBB61_316
+; CHECK-RV64-NEXT:  .LBB61_826: # %cond.load1225
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 308
+; CHECK-RV64-NEXT:    li a3, 307
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_827
+; CHECK-RV64-NEXT:    j .LBB61_317
+; CHECK-RV64-NEXT:  .LBB61_827: # %cond.load1229
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 309
+; CHECK-RV64-NEXT:    li a3, 308
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_828
+; CHECK-RV64-NEXT:    j .LBB61_318
+; CHECK-RV64-NEXT:  .LBB61_828: # %cond.load1233
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 310
+; CHECK-RV64-NEXT:    li a3, 309
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_829
+; CHECK-RV64-NEXT:    j .LBB61_319
+; CHECK-RV64-NEXT:  .LBB61_829: # %cond.load1237
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 311
+; CHECK-RV64-NEXT:    li a3, 310
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_830
+; CHECK-RV64-NEXT:    j .LBB61_320
+; CHECK-RV64-NEXT:  .LBB61_830: # %cond.load1241
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 312
+; CHECK-RV64-NEXT:    li a3, 311
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_831
+; CHECK-RV64-NEXT:    j .LBB61_321
+; CHECK-RV64-NEXT:  .LBB61_831: # %cond.load1245
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 313
+; CHECK-RV64-NEXT:    li a3, 312
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_832
+; CHECK-RV64-NEXT:    j .LBB61_322
+; CHECK-RV64-NEXT:  .LBB61_832: # %cond.load1249
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 314
+; CHECK-RV64-NEXT:    li a3, 313
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_833
+; CHECK-RV64-NEXT:    j .LBB61_323
+; CHECK-RV64-NEXT:  .LBB61_833: # %cond.load1253
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 315
+; CHECK-RV64-NEXT:    li a3, 314
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_834
+; CHECK-RV64-NEXT:    j .LBB61_324
+; CHECK-RV64-NEXT:  .LBB61_834: # %cond.load1257
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 316
+; CHECK-RV64-NEXT:    li a3, 315
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_835
+; CHECK-RV64-NEXT:    j .LBB61_325
+; CHECK-RV64-NEXT:  .LBB61_835: # %cond.load1261
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 317
+; CHECK-RV64-NEXT:    li a3, 316
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1029
+; CHECK-RV64-NEXT:    j .LBB61_326
+; CHECK-RV64-NEXT:  .LBB61_1029: # %cond.load1261
+; CHECK-RV64-NEXT:    j .LBB61_327
+; CHECK-RV64-NEXT:  .LBB61_836: # %cond.load1273
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 320
+; CHECK-RV64-NEXT:    li a3, 319
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_837
+; CHECK-RV64-NEXT:    j .LBB61_331
+; CHECK-RV64-NEXT:  .LBB61_837: # %cond.load1277
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 321
+; CHECK-RV64-NEXT:    li a3, 320
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_838
+; CHECK-RV64-NEXT:    j .LBB61_332
+; CHECK-RV64-NEXT:  .LBB61_838: # %cond.load1281
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 322
+; CHECK-RV64-NEXT:    li a3, 321
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_839
+; CHECK-RV64-NEXT:    j .LBB61_333
+; CHECK-RV64-NEXT:  .LBB61_839: # %cond.load1285
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 323
+; CHECK-RV64-NEXT:    li a3, 322
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_840
+; CHECK-RV64-NEXT:    j .LBB61_334
+; CHECK-RV64-NEXT:  .LBB61_840: # %cond.load1289
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 324
+; CHECK-RV64-NEXT:    li a3, 323
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_841
+; CHECK-RV64-NEXT:    j .LBB61_335
+; CHECK-RV64-NEXT:  .LBB61_841: # %cond.load1293
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 325
+; CHECK-RV64-NEXT:    li a3, 324
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_842
+; CHECK-RV64-NEXT:    j .LBB61_336
+; CHECK-RV64-NEXT:  .LBB61_842: # %cond.load1297
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 326
+; CHECK-RV64-NEXT:    li a3, 325
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_843
+; CHECK-RV64-NEXT:    j .LBB61_337
+; CHECK-RV64-NEXT:  .LBB61_843: # %cond.load1301
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 327
+; CHECK-RV64-NEXT:    li a3, 326
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_844
+; CHECK-RV64-NEXT:    j .LBB61_338
+; CHECK-RV64-NEXT:  .LBB61_844: # %cond.load1305
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 328
+; CHECK-RV64-NEXT:    li a3, 327
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_845
+; CHECK-RV64-NEXT:    j .LBB61_339
+; CHECK-RV64-NEXT:  .LBB61_845: # %cond.load1309
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 329
+; CHECK-RV64-NEXT:    li a3, 328
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_846
+; CHECK-RV64-NEXT:    j .LBB61_340
+; CHECK-RV64-NEXT:  .LBB61_846: # %cond.load1313
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 330
+; CHECK-RV64-NEXT:    li a3, 329
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_847
+; CHECK-RV64-NEXT:    j .LBB61_341
+; CHECK-RV64-NEXT:  .LBB61_847: # %cond.load1317
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 331
+; CHECK-RV64-NEXT:    li a3, 330
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_848
+; CHECK-RV64-NEXT:    j .LBB61_342
+; CHECK-RV64-NEXT:  .LBB61_848: # %cond.load1321
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 332
+; CHECK-RV64-NEXT:    li a3, 331
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_849
+; CHECK-RV64-NEXT:    j .LBB61_343
+; CHECK-RV64-NEXT:  .LBB61_849: # %cond.load1325
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 333
+; CHECK-RV64-NEXT:    li a3, 332
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_850
+; CHECK-RV64-NEXT:    j .LBB61_344
+; CHECK-RV64-NEXT:  .LBB61_850: # %cond.load1329
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 334
+; CHECK-RV64-NEXT:    li a3, 333
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_851
+; CHECK-RV64-NEXT:    j .LBB61_345
+; CHECK-RV64-NEXT:  .LBB61_851: # %cond.load1333
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 335
+; CHECK-RV64-NEXT:    li a3, 334
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_852
+; CHECK-RV64-NEXT:    j .LBB61_346
+; CHECK-RV64-NEXT:  .LBB61_852: # %cond.load1337
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 336
+; CHECK-RV64-NEXT:    li a3, 335
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_853
+; CHECK-RV64-NEXT:    j .LBB61_347
+; CHECK-RV64-NEXT:  .LBB61_853: # %cond.load1341
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 337
+; CHECK-RV64-NEXT:    li a3, 336
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_854
+; CHECK-RV64-NEXT:    j .LBB61_348
+; CHECK-RV64-NEXT:  .LBB61_854: # %cond.load1345
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 338
+; CHECK-RV64-NEXT:    li a3, 337
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_855
+; CHECK-RV64-NEXT:    j .LBB61_349
+; CHECK-RV64-NEXT:  .LBB61_855: # %cond.load1349
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 339
+; CHECK-RV64-NEXT:    li a3, 338
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_856
+; CHECK-RV64-NEXT:    j .LBB61_350
+; CHECK-RV64-NEXT:  .LBB61_856: # %cond.load1353
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 340
+; CHECK-RV64-NEXT:    li a3, 339
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_857
+; CHECK-RV64-NEXT:    j .LBB61_351
+; CHECK-RV64-NEXT:  .LBB61_857: # %cond.load1357
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 341
+; CHECK-RV64-NEXT:    li a3, 340
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_858
+; CHECK-RV64-NEXT:    j .LBB61_352
+; CHECK-RV64-NEXT:  .LBB61_858: # %cond.load1361
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 342
+; CHECK-RV64-NEXT:    li a3, 341
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_859
+; CHECK-RV64-NEXT:    j .LBB61_353
+; CHECK-RV64-NEXT:  .LBB61_859: # %cond.load1365
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 343
+; CHECK-RV64-NEXT:    li a3, 342
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_860
+; CHECK-RV64-NEXT:    j .LBB61_354
+; CHECK-RV64-NEXT:  .LBB61_860: # %cond.load1369
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 344
+; CHECK-RV64-NEXT:    li a3, 343
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_861
+; CHECK-RV64-NEXT:    j .LBB61_355
+; CHECK-RV64-NEXT:  .LBB61_861: # %cond.load1373
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 345
+; CHECK-RV64-NEXT:    li a3, 344
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_862
+; CHECK-RV64-NEXT:    j .LBB61_356
+; CHECK-RV64-NEXT:  .LBB61_862: # %cond.load1377
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 346
+; CHECK-RV64-NEXT:    li a3, 345
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_863
+; CHECK-RV64-NEXT:    j .LBB61_357
+; CHECK-RV64-NEXT:  .LBB61_863: # %cond.load1381
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 347
+; CHECK-RV64-NEXT:    li a3, 346
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_864
+; CHECK-RV64-NEXT:    j .LBB61_358
+; CHECK-RV64-NEXT:  .LBB61_864: # %cond.load1385
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 348
+; CHECK-RV64-NEXT:    li a3, 347
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_865
+; CHECK-RV64-NEXT:    j .LBB61_359
+; CHECK-RV64-NEXT:  .LBB61_865: # %cond.load1389
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 349
+; CHECK-RV64-NEXT:    li a3, 348
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_866
+; CHECK-RV64-NEXT:    j .LBB61_360
+; CHECK-RV64-NEXT:  .LBB61_866: # %cond.load1393
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 350
+; CHECK-RV64-NEXT:    li a3, 349
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_867
+; CHECK-RV64-NEXT:    j .LBB61_361
+; CHECK-RV64-NEXT:  .LBB61_867: # %cond.load1397
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 351
+; CHECK-RV64-NEXT:    li a3, 350
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_868
+; CHECK-RV64-NEXT:    j .LBB61_362
+; CHECK-RV64-NEXT:  .LBB61_868: # %cond.load1401
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 352
+; CHECK-RV64-NEXT:    li a3, 351
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_869
+; CHECK-RV64-NEXT:    j .LBB61_363
+; CHECK-RV64-NEXT:  .LBB61_869: # %cond.load1405
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 353
+; CHECK-RV64-NEXT:    li a3, 352
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_870
+; CHECK-RV64-NEXT:    j .LBB61_364
+; CHECK-RV64-NEXT:  .LBB61_870: # %cond.load1409
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 354
+; CHECK-RV64-NEXT:    li a3, 353
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_871
+; CHECK-RV64-NEXT:    j .LBB61_365
+; CHECK-RV64-NEXT:  .LBB61_871: # %cond.load1413
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 355
+; CHECK-RV64-NEXT:    li a3, 354
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_872
+; CHECK-RV64-NEXT:    j .LBB61_366
+; CHECK-RV64-NEXT:  .LBB61_872: # %cond.load1417
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 356
+; CHECK-RV64-NEXT:    li a3, 355
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_873
+; CHECK-RV64-NEXT:    j .LBB61_367
+; CHECK-RV64-NEXT:  .LBB61_873: # %cond.load1421
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 357
+; CHECK-RV64-NEXT:    li a3, 356
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_874
+; CHECK-RV64-NEXT:    j .LBB61_368
+; CHECK-RV64-NEXT:  .LBB61_874: # %cond.load1425
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 358
+; CHECK-RV64-NEXT:    li a3, 357
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_875
+; CHECK-RV64-NEXT:    j .LBB61_369
+; CHECK-RV64-NEXT:  .LBB61_875: # %cond.load1429
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 359
+; CHECK-RV64-NEXT:    li a3, 358
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_876
+; CHECK-RV64-NEXT:    j .LBB61_370
+; CHECK-RV64-NEXT:  .LBB61_876: # %cond.load1433
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 360
+; CHECK-RV64-NEXT:    li a3, 359
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_877
+; CHECK-RV64-NEXT:    j .LBB61_371
+; CHECK-RV64-NEXT:  .LBB61_877: # %cond.load1437
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 361
+; CHECK-RV64-NEXT:    li a3, 360
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_878
+; CHECK-RV64-NEXT:    j .LBB61_372
+; CHECK-RV64-NEXT:  .LBB61_878: # %cond.load1441
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 362
+; CHECK-RV64-NEXT:    li a3, 361
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_879
+; CHECK-RV64-NEXT:    j .LBB61_373
+; CHECK-RV64-NEXT:  .LBB61_879: # %cond.load1445
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 363
+; CHECK-RV64-NEXT:    li a3, 362
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_880
+; CHECK-RV64-NEXT:    j .LBB61_374
+; CHECK-RV64-NEXT:  .LBB61_880: # %cond.load1449
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 364
+; CHECK-RV64-NEXT:    li a3, 363
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_881
+; CHECK-RV64-NEXT:    j .LBB61_375
+; CHECK-RV64-NEXT:  .LBB61_881: # %cond.load1453
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 365
+; CHECK-RV64-NEXT:    li a3, 364
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_882
+; CHECK-RV64-NEXT:    j .LBB61_376
+; CHECK-RV64-NEXT:  .LBB61_882: # %cond.load1457
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 366
+; CHECK-RV64-NEXT:    li a3, 365
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_883
+; CHECK-RV64-NEXT:    j .LBB61_377
+; CHECK-RV64-NEXT:  .LBB61_883: # %cond.load1461
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 367
+; CHECK-RV64-NEXT:    li a3, 366
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_884
+; CHECK-RV64-NEXT:    j .LBB61_378
+; CHECK-RV64-NEXT:  .LBB61_884: # %cond.load1465
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 368
+; CHECK-RV64-NEXT:    li a3, 367
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_885
+; CHECK-RV64-NEXT:    j .LBB61_379
+; CHECK-RV64-NEXT:  .LBB61_885: # %cond.load1469
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 369
+; CHECK-RV64-NEXT:    li a3, 368
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_886
+; CHECK-RV64-NEXT:    j .LBB61_380
+; CHECK-RV64-NEXT:  .LBB61_886: # %cond.load1473
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 370
+; CHECK-RV64-NEXT:    li a3, 369
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_887
+; CHECK-RV64-NEXT:    j .LBB61_381
+; CHECK-RV64-NEXT:  .LBB61_887: # %cond.load1477
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 371
+; CHECK-RV64-NEXT:    li a3, 370
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_888
+; CHECK-RV64-NEXT:    j .LBB61_382
+; CHECK-RV64-NEXT:  .LBB61_888: # %cond.load1481
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 372
+; CHECK-RV64-NEXT:    li a3, 371
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_889
+; CHECK-RV64-NEXT:    j .LBB61_383
+; CHECK-RV64-NEXT:  .LBB61_889: # %cond.load1485
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 373
+; CHECK-RV64-NEXT:    li a3, 372
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_890
+; CHECK-RV64-NEXT:    j .LBB61_384
+; CHECK-RV64-NEXT:  .LBB61_890: # %cond.load1489
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 374
+; CHECK-RV64-NEXT:    li a3, 373
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_891
+; CHECK-RV64-NEXT:    j .LBB61_385
+; CHECK-RV64-NEXT:  .LBB61_891: # %cond.load1493
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 375
+; CHECK-RV64-NEXT:    li a3, 374
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_892
+; CHECK-RV64-NEXT:    j .LBB61_386
+; CHECK-RV64-NEXT:  .LBB61_892: # %cond.load1497
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 376
+; CHECK-RV64-NEXT:    li a3, 375
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_893
+; CHECK-RV64-NEXT:    j .LBB61_387
+; CHECK-RV64-NEXT:  .LBB61_893: # %cond.load1501
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 377
+; CHECK-RV64-NEXT:    li a3, 376
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_894
+; CHECK-RV64-NEXT:    j .LBB61_388
+; CHECK-RV64-NEXT:  .LBB61_894: # %cond.load1505
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 378
+; CHECK-RV64-NEXT:    li a3, 377
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_895
+; CHECK-RV64-NEXT:    j .LBB61_389
+; CHECK-RV64-NEXT:  .LBB61_895: # %cond.load1509
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 379
+; CHECK-RV64-NEXT:    li a3, 378
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_896
+; CHECK-RV64-NEXT:    j .LBB61_390
+; CHECK-RV64-NEXT:  .LBB61_896: # %cond.load1513
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 380
+; CHECK-RV64-NEXT:    li a3, 379
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_897
+; CHECK-RV64-NEXT:    j .LBB61_391
+; CHECK-RV64-NEXT:  .LBB61_897: # %cond.load1517
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 381
+; CHECK-RV64-NEXT:    li a3, 380
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1030
+; CHECK-RV64-NEXT:    j .LBB61_392
+; CHECK-RV64-NEXT:  .LBB61_1030: # %cond.load1517
+; CHECK-RV64-NEXT:    j .LBB61_393
+; CHECK-RV64-NEXT:  .LBB61_898: # %cond.load1529
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 384
+; CHECK-RV64-NEXT:    li a3, 383
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_899
+; CHECK-RV64-NEXT:    j .LBB61_397
+; CHECK-RV64-NEXT:  .LBB61_899: # %cond.load1533
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 385
+; CHECK-RV64-NEXT:    li a3, 384
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_900
+; CHECK-RV64-NEXT:    j .LBB61_398
+; CHECK-RV64-NEXT:  .LBB61_900: # %cond.load1537
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 386
+; CHECK-RV64-NEXT:    li a3, 385
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_901
+; CHECK-RV64-NEXT:    j .LBB61_399
+; CHECK-RV64-NEXT:  .LBB61_901: # %cond.load1541
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 387
+; CHECK-RV64-NEXT:    li a3, 386
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_902
+; CHECK-RV64-NEXT:    j .LBB61_400
+; CHECK-RV64-NEXT:  .LBB61_902: # %cond.load1545
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 388
+; CHECK-RV64-NEXT:    li a3, 387
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_903
+; CHECK-RV64-NEXT:    j .LBB61_401
+; CHECK-RV64-NEXT:  .LBB61_903: # %cond.load1549
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 389
+; CHECK-RV64-NEXT:    li a3, 388
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_904
+; CHECK-RV64-NEXT:    j .LBB61_402
+; CHECK-RV64-NEXT:  .LBB61_904: # %cond.load1553
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 390
+; CHECK-RV64-NEXT:    li a3, 389
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_905
+; CHECK-RV64-NEXT:    j .LBB61_403
+; CHECK-RV64-NEXT:  .LBB61_905: # %cond.load1557
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 391
+; CHECK-RV64-NEXT:    li a3, 390
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_906
+; CHECK-RV64-NEXT:    j .LBB61_404
+; CHECK-RV64-NEXT:  .LBB61_906: # %cond.load1561
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 392
+; CHECK-RV64-NEXT:    li a3, 391
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_907
+; CHECK-RV64-NEXT:    j .LBB61_405
+; CHECK-RV64-NEXT:  .LBB61_907: # %cond.load1565
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 393
+; CHECK-RV64-NEXT:    li a3, 392
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_908
+; CHECK-RV64-NEXT:    j .LBB61_406
+; CHECK-RV64-NEXT:  .LBB61_908: # %cond.load1569
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 394
+; CHECK-RV64-NEXT:    li a3, 393
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_909
+; CHECK-RV64-NEXT:    j .LBB61_407
+; CHECK-RV64-NEXT:  .LBB61_909: # %cond.load1573
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 395
+; CHECK-RV64-NEXT:    li a3, 394
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_910
+; CHECK-RV64-NEXT:    j .LBB61_408
+; CHECK-RV64-NEXT:  .LBB61_910: # %cond.load1577
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 396
+; CHECK-RV64-NEXT:    li a3, 395
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_911
+; CHECK-RV64-NEXT:    j .LBB61_409
+; CHECK-RV64-NEXT:  .LBB61_911: # %cond.load1581
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 397
+; CHECK-RV64-NEXT:    li a3, 396
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_912
+; CHECK-RV64-NEXT:    j .LBB61_410
+; CHECK-RV64-NEXT:  .LBB61_912: # %cond.load1585
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 398
+; CHECK-RV64-NEXT:    li a3, 397
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_913
+; CHECK-RV64-NEXT:    j .LBB61_411
+; CHECK-RV64-NEXT:  .LBB61_913: # %cond.load1589
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 399
+; CHECK-RV64-NEXT:    li a3, 398
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_914
+; CHECK-RV64-NEXT:    j .LBB61_412
+; CHECK-RV64-NEXT:  .LBB61_914: # %cond.load1593
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 400
+; CHECK-RV64-NEXT:    li a3, 399
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_915
+; CHECK-RV64-NEXT:    j .LBB61_413
+; CHECK-RV64-NEXT:  .LBB61_915: # %cond.load1597
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 401
+; CHECK-RV64-NEXT:    li a3, 400
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_916
+; CHECK-RV64-NEXT:    j .LBB61_414
+; CHECK-RV64-NEXT:  .LBB61_916: # %cond.load1601
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 402
+; CHECK-RV64-NEXT:    li a3, 401
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_917
+; CHECK-RV64-NEXT:    j .LBB61_415
+; CHECK-RV64-NEXT:  .LBB61_917: # %cond.load1605
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 403
+; CHECK-RV64-NEXT:    li a3, 402
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_918
+; CHECK-RV64-NEXT:    j .LBB61_416
+; CHECK-RV64-NEXT:  .LBB61_918: # %cond.load1609
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 404
+; CHECK-RV64-NEXT:    li a3, 403
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_919
+; CHECK-RV64-NEXT:    j .LBB61_417
+; CHECK-RV64-NEXT:  .LBB61_919: # %cond.load1613
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 405
+; CHECK-RV64-NEXT:    li a3, 404
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_920
+; CHECK-RV64-NEXT:    j .LBB61_418
+; CHECK-RV64-NEXT:  .LBB61_920: # %cond.load1617
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 406
+; CHECK-RV64-NEXT:    li a3, 405
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_921
+; CHECK-RV64-NEXT:    j .LBB61_419
+; CHECK-RV64-NEXT:  .LBB61_921: # %cond.load1621
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 407
+; CHECK-RV64-NEXT:    li a3, 406
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_922
+; CHECK-RV64-NEXT:    j .LBB61_420
+; CHECK-RV64-NEXT:  .LBB61_922: # %cond.load1625
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 408
+; CHECK-RV64-NEXT:    li a3, 407
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_923
+; CHECK-RV64-NEXT:    j .LBB61_421
+; CHECK-RV64-NEXT:  .LBB61_923: # %cond.load1629
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 409
+; CHECK-RV64-NEXT:    li a3, 408
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_924
+; CHECK-RV64-NEXT:    j .LBB61_422
+; CHECK-RV64-NEXT:  .LBB61_924: # %cond.load1633
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 410
+; CHECK-RV64-NEXT:    li a3, 409
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_925
+; CHECK-RV64-NEXT:    j .LBB61_423
+; CHECK-RV64-NEXT:  .LBB61_925: # %cond.load1637
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 411
+; CHECK-RV64-NEXT:    li a3, 410
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_926
+; CHECK-RV64-NEXT:    j .LBB61_424
+; CHECK-RV64-NEXT:  .LBB61_926: # %cond.load1641
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 412
+; CHECK-RV64-NEXT:    li a3, 411
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_927
+; CHECK-RV64-NEXT:    j .LBB61_425
+; CHECK-RV64-NEXT:  .LBB61_927: # %cond.load1645
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 413
+; CHECK-RV64-NEXT:    li a3, 412
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_928
+; CHECK-RV64-NEXT:    j .LBB61_426
+; CHECK-RV64-NEXT:  .LBB61_928: # %cond.load1649
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 414
+; CHECK-RV64-NEXT:    li a3, 413
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_929
+; CHECK-RV64-NEXT:    j .LBB61_427
+; CHECK-RV64-NEXT:  .LBB61_929: # %cond.load1653
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 415
+; CHECK-RV64-NEXT:    li a3, 414
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_930
+; CHECK-RV64-NEXT:    j .LBB61_428
+; CHECK-RV64-NEXT:  .LBB61_930: # %cond.load1657
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 416
+; CHECK-RV64-NEXT:    li a3, 415
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_931
+; CHECK-RV64-NEXT:    j .LBB61_429
+; CHECK-RV64-NEXT:  .LBB61_931: # %cond.load1661
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 417
+; CHECK-RV64-NEXT:    li a3, 416
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_932
+; CHECK-RV64-NEXT:    j .LBB61_430
+; CHECK-RV64-NEXT:  .LBB61_932: # %cond.load1665
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 418
+; CHECK-RV64-NEXT:    li a3, 417
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_933
+; CHECK-RV64-NEXT:    j .LBB61_431
+; CHECK-RV64-NEXT:  .LBB61_933: # %cond.load1669
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 419
+; CHECK-RV64-NEXT:    li a3, 418
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_934
+; CHECK-RV64-NEXT:    j .LBB61_432
+; CHECK-RV64-NEXT:  .LBB61_934: # %cond.load1673
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 420
+; CHECK-RV64-NEXT:    li a3, 419
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_935
+; CHECK-RV64-NEXT:    j .LBB61_433
+; CHECK-RV64-NEXT:  .LBB61_935: # %cond.load1677
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 421
+; CHECK-RV64-NEXT:    li a3, 420
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_936
+; CHECK-RV64-NEXT:    j .LBB61_434
+; CHECK-RV64-NEXT:  .LBB61_936: # %cond.load1681
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 422
+; CHECK-RV64-NEXT:    li a3, 421
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_937
+; CHECK-RV64-NEXT:    j .LBB61_435
+; CHECK-RV64-NEXT:  .LBB61_937: # %cond.load1685
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 423
+; CHECK-RV64-NEXT:    li a3, 422
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_938
+; CHECK-RV64-NEXT:    j .LBB61_436
+; CHECK-RV64-NEXT:  .LBB61_938: # %cond.load1689
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 424
+; CHECK-RV64-NEXT:    li a3, 423
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_939
+; CHECK-RV64-NEXT:    j .LBB61_437
+; CHECK-RV64-NEXT:  .LBB61_939: # %cond.load1693
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 425
+; CHECK-RV64-NEXT:    li a3, 424
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_940
+; CHECK-RV64-NEXT:    j .LBB61_438
+; CHECK-RV64-NEXT:  .LBB61_940: # %cond.load1697
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 426
+; CHECK-RV64-NEXT:    li a3, 425
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_941
+; CHECK-RV64-NEXT:    j .LBB61_439
+; CHECK-RV64-NEXT:  .LBB61_941: # %cond.load1701
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 427
+; CHECK-RV64-NEXT:    li a3, 426
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_942
+; CHECK-RV64-NEXT:    j .LBB61_440
+; CHECK-RV64-NEXT:  .LBB61_942: # %cond.load1705
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 428
+; CHECK-RV64-NEXT:    li a3, 427
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_943
+; CHECK-RV64-NEXT:    j .LBB61_441
+; CHECK-RV64-NEXT:  .LBB61_943: # %cond.load1709
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 429
+; CHECK-RV64-NEXT:    li a3, 428
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_944
+; CHECK-RV64-NEXT:    j .LBB61_442
+; CHECK-RV64-NEXT:  .LBB61_944: # %cond.load1713
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 430
+; CHECK-RV64-NEXT:    li a3, 429
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_945
+; CHECK-RV64-NEXT:    j .LBB61_443
+; CHECK-RV64-NEXT:  .LBB61_945: # %cond.load1717
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 431
+; CHECK-RV64-NEXT:    li a3, 430
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_946
+; CHECK-RV64-NEXT:    j .LBB61_444
+; CHECK-RV64-NEXT:  .LBB61_946: # %cond.load1721
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 432
+; CHECK-RV64-NEXT:    li a3, 431
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_947
+; CHECK-RV64-NEXT:    j .LBB61_445
+; CHECK-RV64-NEXT:  .LBB61_947: # %cond.load1725
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 433
+; CHECK-RV64-NEXT:    li a3, 432
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_948
+; CHECK-RV64-NEXT:    j .LBB61_446
+; CHECK-RV64-NEXT:  .LBB61_948: # %cond.load1729
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 434
+; CHECK-RV64-NEXT:    li a3, 433
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_949
+; CHECK-RV64-NEXT:    j .LBB61_447
+; CHECK-RV64-NEXT:  .LBB61_949: # %cond.load1733
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 435
+; CHECK-RV64-NEXT:    li a3, 434
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_950
+; CHECK-RV64-NEXT:    j .LBB61_448
+; CHECK-RV64-NEXT:  .LBB61_950: # %cond.load1737
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 436
+; CHECK-RV64-NEXT:    li a3, 435
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_951
+; CHECK-RV64-NEXT:    j .LBB61_449
+; CHECK-RV64-NEXT:  .LBB61_951: # %cond.load1741
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 437
+; CHECK-RV64-NEXT:    li a3, 436
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_952
+; CHECK-RV64-NEXT:    j .LBB61_450
+; CHECK-RV64-NEXT:  .LBB61_952: # %cond.load1745
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 438
+; CHECK-RV64-NEXT:    li a3, 437
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_953
+; CHECK-RV64-NEXT:    j .LBB61_451
+; CHECK-RV64-NEXT:  .LBB61_953: # %cond.load1749
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 439
+; CHECK-RV64-NEXT:    li a3, 438
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_954
+; CHECK-RV64-NEXT:    j .LBB61_452
+; CHECK-RV64-NEXT:  .LBB61_954: # %cond.load1753
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 440
+; CHECK-RV64-NEXT:    li a3, 439
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_955
+; CHECK-RV64-NEXT:    j .LBB61_453
+; CHECK-RV64-NEXT:  .LBB61_955: # %cond.load1757
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 441
+; CHECK-RV64-NEXT:    li a3, 440
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_956
+; CHECK-RV64-NEXT:    j .LBB61_454
+; CHECK-RV64-NEXT:  .LBB61_956: # %cond.load1761
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 442
+; CHECK-RV64-NEXT:    li a3, 441
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_957
+; CHECK-RV64-NEXT:    j .LBB61_455
+; CHECK-RV64-NEXT:  .LBB61_957: # %cond.load1765
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 443
+; CHECK-RV64-NEXT:    li a3, 442
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_958
+; CHECK-RV64-NEXT:    j .LBB61_456
+; CHECK-RV64-NEXT:  .LBB61_958: # %cond.load1769
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 444
+; CHECK-RV64-NEXT:    li a3, 443
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_959
+; CHECK-RV64-NEXT:    j .LBB61_457
+; CHECK-RV64-NEXT:  .LBB61_959: # %cond.load1773
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 445
+; CHECK-RV64-NEXT:    li a3, 444
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1031
+; CHECK-RV64-NEXT:    j .LBB61_458
+; CHECK-RV64-NEXT:  .LBB61_1031: # %cond.load1773
+; CHECK-RV64-NEXT:    j .LBB61_459
+; CHECK-RV64-NEXT:  .LBB61_960: # %cond.load1785
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 448
+; CHECK-RV64-NEXT:    li a3, 447
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_961
+; CHECK-RV64-NEXT:    j .LBB61_463
+; CHECK-RV64-NEXT:  .LBB61_961: # %cond.load1789
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 449
+; CHECK-RV64-NEXT:    li a3, 448
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_962
+; CHECK-RV64-NEXT:    j .LBB61_464
+; CHECK-RV64-NEXT:  .LBB61_962: # %cond.load1793
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 450
+; CHECK-RV64-NEXT:    li a3, 449
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_963
+; CHECK-RV64-NEXT:    j .LBB61_465
+; CHECK-RV64-NEXT:  .LBB61_963: # %cond.load1797
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 451
+; CHECK-RV64-NEXT:    li a3, 450
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_964
+; CHECK-RV64-NEXT:    j .LBB61_466
+; CHECK-RV64-NEXT:  .LBB61_964: # %cond.load1801
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 452
+; CHECK-RV64-NEXT:    li a3, 451
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_965
+; CHECK-RV64-NEXT:    j .LBB61_467
+; CHECK-RV64-NEXT:  .LBB61_965: # %cond.load1805
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 453
+; CHECK-RV64-NEXT:    li a3, 452
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_966
+; CHECK-RV64-NEXT:    j .LBB61_468
+; CHECK-RV64-NEXT:  .LBB61_966: # %cond.load1809
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 454
+; CHECK-RV64-NEXT:    li a3, 453
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_967
+; CHECK-RV64-NEXT:    j .LBB61_469
+; CHECK-RV64-NEXT:  .LBB61_967: # %cond.load1813
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 455
+; CHECK-RV64-NEXT:    li a3, 454
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_968
+; CHECK-RV64-NEXT:    j .LBB61_470
+; CHECK-RV64-NEXT:  .LBB61_968: # %cond.load1817
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 456
+; CHECK-RV64-NEXT:    li a3, 455
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_969
+; CHECK-RV64-NEXT:    j .LBB61_471
+; CHECK-RV64-NEXT:  .LBB61_969: # %cond.load1821
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 457
+; CHECK-RV64-NEXT:    li a3, 456
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_970
+; CHECK-RV64-NEXT:    j .LBB61_472
+; CHECK-RV64-NEXT:  .LBB61_970: # %cond.load1825
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 458
+; CHECK-RV64-NEXT:    li a3, 457
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_971
+; CHECK-RV64-NEXT:    j .LBB61_473
+; CHECK-RV64-NEXT:  .LBB61_971: # %cond.load1829
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 459
+; CHECK-RV64-NEXT:    li a3, 458
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_972
+; CHECK-RV64-NEXT:    j .LBB61_474
+; CHECK-RV64-NEXT:  .LBB61_972: # %cond.load1833
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 460
+; CHECK-RV64-NEXT:    li a3, 459
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_973
+; CHECK-RV64-NEXT:    j .LBB61_475
+; CHECK-RV64-NEXT:  .LBB61_973: # %cond.load1837
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 461
+; CHECK-RV64-NEXT:    li a3, 460
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_974
+; CHECK-RV64-NEXT:    j .LBB61_476
+; CHECK-RV64-NEXT:  .LBB61_974: # %cond.load1841
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 462
+; CHECK-RV64-NEXT:    li a3, 461
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_975
+; CHECK-RV64-NEXT:    j .LBB61_477
+; CHECK-RV64-NEXT:  .LBB61_975: # %cond.load1845
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 463
+; CHECK-RV64-NEXT:    li a3, 462
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_976
+; CHECK-RV64-NEXT:    j .LBB61_478
+; CHECK-RV64-NEXT:  .LBB61_976: # %cond.load1849
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 464
+; CHECK-RV64-NEXT:    li a3, 463
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_977
+; CHECK-RV64-NEXT:    j .LBB61_479
+; CHECK-RV64-NEXT:  .LBB61_977: # %cond.load1853
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 465
+; CHECK-RV64-NEXT:    li a3, 464
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_978
+; CHECK-RV64-NEXT:    j .LBB61_480
+; CHECK-RV64-NEXT:  .LBB61_978: # %cond.load1857
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 466
+; CHECK-RV64-NEXT:    li a3, 465
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_979
+; CHECK-RV64-NEXT:    j .LBB61_481
+; CHECK-RV64-NEXT:  .LBB61_979: # %cond.load1861
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 467
+; CHECK-RV64-NEXT:    li a3, 466
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_980
+; CHECK-RV64-NEXT:    j .LBB61_482
+; CHECK-RV64-NEXT:  .LBB61_980: # %cond.load1865
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 468
+; CHECK-RV64-NEXT:    li a3, 467
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_981
+; CHECK-RV64-NEXT:    j .LBB61_483
+; CHECK-RV64-NEXT:  .LBB61_981: # %cond.load1869
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 469
+; CHECK-RV64-NEXT:    li a3, 468
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_982
+; CHECK-RV64-NEXT:    j .LBB61_484
+; CHECK-RV64-NEXT:  .LBB61_982: # %cond.load1873
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 470
+; CHECK-RV64-NEXT:    li a3, 469
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_983
+; CHECK-RV64-NEXT:    j .LBB61_485
+; CHECK-RV64-NEXT:  .LBB61_983: # %cond.load1877
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 471
+; CHECK-RV64-NEXT:    li a3, 470
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_984
+; CHECK-RV64-NEXT:    j .LBB61_486
+; CHECK-RV64-NEXT:  .LBB61_984: # %cond.load1881
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 472
+; CHECK-RV64-NEXT:    li a3, 471
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_985
+; CHECK-RV64-NEXT:    j .LBB61_487
+; CHECK-RV64-NEXT:  .LBB61_985: # %cond.load1885
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 473
+; CHECK-RV64-NEXT:    li a3, 472
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_986
+; CHECK-RV64-NEXT:    j .LBB61_488
+; CHECK-RV64-NEXT:  .LBB61_986: # %cond.load1889
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 474
+; CHECK-RV64-NEXT:    li a3, 473
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_987
+; CHECK-RV64-NEXT:    j .LBB61_489
+; CHECK-RV64-NEXT:  .LBB61_987: # %cond.load1893
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 475
+; CHECK-RV64-NEXT:    li a3, 474
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_988
+; CHECK-RV64-NEXT:    j .LBB61_490
+; CHECK-RV64-NEXT:  .LBB61_988: # %cond.load1897
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 476
+; CHECK-RV64-NEXT:    li a3, 475
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_989
+; CHECK-RV64-NEXT:    j .LBB61_491
+; CHECK-RV64-NEXT:  .LBB61_989: # %cond.load1901
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 477
+; CHECK-RV64-NEXT:    li a3, 476
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_990
+; CHECK-RV64-NEXT:    j .LBB61_492
+; CHECK-RV64-NEXT:  .LBB61_990: # %cond.load1905
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 478
+; CHECK-RV64-NEXT:    li a3, 477
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_991
+; CHECK-RV64-NEXT:    j .LBB61_493
+; CHECK-RV64-NEXT:  .LBB61_991: # %cond.load1909
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 479
+; CHECK-RV64-NEXT:    li a3, 478
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_992
+; CHECK-RV64-NEXT:    j .LBB61_494
+; CHECK-RV64-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 480
+; CHECK-RV64-NEXT:    li a3, 479
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_993
+; CHECK-RV64-NEXT:    j .LBB61_495
+; CHECK-RV64-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 481
+; CHECK-RV64-NEXT:    li a3, 480
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_994
+; CHECK-RV64-NEXT:    j .LBB61_496
+; CHECK-RV64-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 482
+; CHECK-RV64-NEXT:    li a3, 481
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_995
+; CHECK-RV64-NEXT:    j .LBB61_497
+; CHECK-RV64-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 483
+; CHECK-RV64-NEXT:    li a3, 482
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_996
+; CHECK-RV64-NEXT:    j .LBB61_498
+; CHECK-RV64-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 484
+; CHECK-RV64-NEXT:    li a3, 483
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_997
+; CHECK-RV64-NEXT:    j .LBB61_499
+; CHECK-RV64-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 485
+; CHECK-RV64-NEXT:    li a3, 484
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_998
+; CHECK-RV64-NEXT:    j .LBB61_500
+; CHECK-RV64-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 486
+; CHECK-RV64-NEXT:    li a3, 485
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_999
+; CHECK-RV64-NEXT:    j .LBB61_501
+; CHECK-RV64-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 487
+; CHECK-RV64-NEXT:    li a3, 486
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1000
+; CHECK-RV64-NEXT:    j .LBB61_502
+; CHECK-RV64-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 488
+; CHECK-RV64-NEXT:    li a3, 487
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1001
+; CHECK-RV64-NEXT:    j .LBB61_503
+; CHECK-RV64-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 489
+; CHECK-RV64-NEXT:    li a3, 488
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1002
+; CHECK-RV64-NEXT:    j .LBB61_504
+; CHECK-RV64-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 490
+; CHECK-RV64-NEXT:    li a3, 489
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1003
+; CHECK-RV64-NEXT:    j .LBB61_505
+; CHECK-RV64-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 491
+; CHECK-RV64-NEXT:    li a3, 490
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1004
+; CHECK-RV64-NEXT:    j .LBB61_506
+; CHECK-RV64-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 492
+; CHECK-RV64-NEXT:    li a3, 491
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1005
+; CHECK-RV64-NEXT:    j .LBB61_507
+; CHECK-RV64-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 493
+; CHECK-RV64-NEXT:    li a3, 492
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1006
+; CHECK-RV64-NEXT:    j .LBB61_508
+; CHECK-RV64-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 494
+; CHECK-RV64-NEXT:    li a3, 493
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1007
+; CHECK-RV64-NEXT:    j .LBB61_509
+; CHECK-RV64-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 495
+; CHECK-RV64-NEXT:    li a3, 494
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1008
+; CHECK-RV64-NEXT:    j .LBB61_510
+; CHECK-RV64-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 496
+; CHECK-RV64-NEXT:    li a3, 495
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1009
+; CHECK-RV64-NEXT:    j .LBB61_511
+; CHECK-RV64-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 497
+; CHECK-RV64-NEXT:    li a3, 496
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1010
+; CHECK-RV64-NEXT:    j .LBB61_512
+; CHECK-RV64-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 498
+; CHECK-RV64-NEXT:    li a3, 497
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1011
+; CHECK-RV64-NEXT:    j .LBB61_513
+; CHECK-RV64-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 499
+; CHECK-RV64-NEXT:    li a3, 498
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1012
+; CHECK-RV64-NEXT:    j .LBB61_514
+; CHECK-RV64-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 500
+; CHECK-RV64-NEXT:    li a3, 499
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1013
+; CHECK-RV64-NEXT:    j .LBB61_515
+; CHECK-RV64-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 501
+; CHECK-RV64-NEXT:    li a3, 500
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1014
+; CHECK-RV64-NEXT:    j .LBB61_516
+; CHECK-RV64-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 502
+; CHECK-RV64-NEXT:    li a3, 501
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1015
+; CHECK-RV64-NEXT:    j .LBB61_517
+; CHECK-RV64-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 503
+; CHECK-RV64-NEXT:    li a3, 502
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1016
+; CHECK-RV64-NEXT:    j .LBB61_518
+; CHECK-RV64-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 504
+; CHECK-RV64-NEXT:    li a3, 503
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1017
+; CHECK-RV64-NEXT:    j .LBB61_519
+; CHECK-RV64-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 505
+; CHECK-RV64-NEXT:    li a3, 504
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1018
+; CHECK-RV64-NEXT:    j .LBB61_520
+; CHECK-RV64-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 506
+; CHECK-RV64-NEXT:    li a3, 505
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1019
+; CHECK-RV64-NEXT:    j .LBB61_521
+; CHECK-RV64-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 507
+; CHECK-RV64-NEXT:    li a3, 506
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1020
+; CHECK-RV64-NEXT:    j .LBB61_522
+; CHECK-RV64-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 508
+; CHECK-RV64-NEXT:    li a3, 507
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1021
+; CHECK-RV64-NEXT:    j .LBB61_523
+; CHECK-RV64-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 509
+; CHECK-RV64-NEXT:    li a3, 508
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1022
+; CHECK-RV64-NEXT:    j .LBB61_524
+; CHECK-RV64-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 510
+; CHECK-RV64-NEXT:    li a3, 509
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1023
+; CHECK-RV64-NEXT:    j .LBB61_525
+; CHECK-RV64-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 511
+; CHECK-RV64-NEXT:    li a3, 510
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_1024
+; CHECK-RV64-NEXT:    j .LBB61_526
+; CHECK-RV64-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-RV64-NEXT:    lbu a0, 0(a0)
+; CHECK-RV64-NEXT:    li a1, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a0
+; CHECK-RV64-NEXT:    li a0, 511
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-RV64-NEXT:    ret
+  %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
+  ret <512 x i8> %res
+}

>From eb5355de2608051c90cc3bf0f72ee81e38624d5a Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 7 Aug 2024 12:12:55 +0800
Subject: [PATCH 11/15] Remove include

---
 llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp | 1 -
 1 file changed, 1 deletion(-)

diff --git a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
index dacbf1a3991cde..f1e82cd968506b 100644
--- a/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
+++ b/llvm/lib/Target/RISCV/RISCVTargetTransformInfo.cpp
@@ -15,7 +15,6 @@
 #include "llvm/CodeGen/TargetLowering.h"
 #include "llvm/IR/Instructions.h"
 #include "llvm/IR/PatternMatch.h"
-#include "llvm/Support/TypeSize.h"
 #include <cmath>
 #include <optional>
 using namespace llvm;

>From 8288bd6c91b9dfdee3df91373a0128cabb0a4dce Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Mon, 26 Aug 2024 11:14:16 +0800
Subject: [PATCH 12/15] Add subtarget feature

---
 llvm/lib/Target/RISCV/RISCVFeatures.td        |     4 +
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp   |   100 +-
 llvm/test/CodeGen/RISCV/rvv/expandload.ll     | 58232 +++++++++++-----
 .../RISCV/rvv/fixed-vectors-expandload-fp.ll  |   457 +-
 .../RISCV/rvv/fixed-vectors-expandload-int.ll |   503 +-
 5 files changed, 39669 insertions(+), 19627 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVFeatures.td b/llvm/lib/Target/RISCV/RISCVFeatures.td
index 3d0e1dae801d39..88bad0e7920a6d 100644
--- a/llvm/lib/Target/RISCV/RISCVFeatures.td
+++ b/llvm/lib/Target/RISCV/RISCVFeatures.td
@@ -1359,6 +1359,10 @@ def TuneOptimizedZeroStrideLoad
                       "true", "Optimized (perform fewer memory operations)"
                       "zero-stride vector load">;
 
+def TuneOptimizedIndexedLoadStore
+   : SubtargetFeature<"optimized-indexed-load-store", "HasOptimizedIndexedLoadStore",
+                      "true", "Optimized vector indexed load/store">;
+
 def Experimental
    : SubtargetFeature<"experimental", "HasExperimental",
                       "true", "Experimental intrinsics">;
diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 6753356e53d336..4f70b7e9b49653 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11136,8 +11136,9 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
   if (!VL)
     VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second;
 
-  SDValue Index;
-  if (!IsUnmasked && IsExpandingLoad) {
+  SDValue Result;
+  if (!IsUnmasked && IsExpandingLoad &&
+      Subtarget.hasOptimizedIndexedLoadStore()) {
     MVT IndexVT = ContainerVT;
     if (ContainerVT.isFloatingPoint())
       IndexVT = IndexVT.changeVectorElementTypeToInteger();
@@ -11147,7 +11148,8 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
       IndexVT = IndexVT.changeVectorElementType(XLenVT);
 
     // If index vector is an i8 vector and the element count exceeds 256, we
-    // should change the element type of index vector to i16 to avoid overflow.
+    // should change the element type of index vector to i16 to avoid
+    // overflow.
     if (IndexEltVT == MVT::i8 && VT.getVectorNumElements() > 256) {
       // FIXME: We need to do vector splitting manually for LMUL=8 cases.
       if (getLMUL(IndexVT) == RISCVII::LMUL_8)
@@ -11155,7 +11157,7 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
     }
 
-    Index =
+    SDValue Index =
         DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
                     DAG.getTargetConstant(Intrinsic::riscv_viota, DL, XLenVT),
                     DAG.getUNDEF(IndexVT), Mask, VL);
@@ -11163,31 +11165,81 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
       Index = DAG.getNode(RISCVISD::SHL_VL, DL, IndexVT, Index,
                           DAG.getConstant(Log2_64(EltSize / 8), DL, IndexVT),
                           DAG.getUNDEF(IndexVT), Mask, VL);
-  }
-
-  unsigned IntID = IsUnmasked        ? Intrinsic::riscv_vle
-                   : IsExpandingLoad ? Intrinsic::riscv_vluxei_mask
-                                     : Intrinsic::riscv_vle_mask;
-  SmallVector<SDValue, 8> Ops{Chain, DAG.getTargetConstant(IntID, DL, XLenVT)};
-  if (IsUnmasked)
-    Ops.push_back(DAG.getUNDEF(ContainerVT));
-  else
+    unsigned IntID = Intrinsic::riscv_vluxei_mask;
+    SmallVector<SDValue, 8> Ops{Chain,
+                                DAG.getTargetConstant(IntID, DL, XLenVT)};
     Ops.push_back(PassThru);
-  Ops.push_back(BasePtr);
-  if (!IsUnmasked) {
-    if (IsExpandingLoad)
-      Ops.push_back(Index);
+    Ops.push_back(BasePtr);
+    Ops.push_back(Index);
     Ops.push_back(Mask);
-  }
-  Ops.push_back(VL);
-  if (!IsUnmasked)
+    Ops.push_back(VL);
     Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
 
-  SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
+    SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
 
-  SDValue Result =
-      DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops, MemVT, MMO);
-  Chain = Result.getValue(1);
+    Result = DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops,
+                                     MemVT, MMO);
+    Chain = Result.getValue(1);
+  } else {
+    SDValue ExpandingVL;
+    if (!IsUnmasked && IsExpandingLoad &&
+        !Subtarget.hasOptimizedIndexedLoadStore()) {
+      ExpandingVL = VL;
+      VL = DAG.getNode(RISCVISD::VCPOP_VL, DL, XLenVT, Mask,
+                       getAllOnesMask(Mask.getSimpleValueType(), VL, DL, DAG),
+                       VL);
+    }
+
+    unsigned IntID = IsUnmasked || (IsExpandingLoad &&
+                                    !Subtarget.hasOptimizedIndexedLoadStore())
+                         ? Intrinsic::riscv_vle
+                         : Intrinsic::riscv_vle_mask;
+    SmallVector<SDValue, 8> Ops{Chain,
+                                DAG.getTargetConstant(IntID, DL, XLenVT)};
+    if (IntID == Intrinsic::riscv_vle)
+      Ops.push_back(DAG.getUNDEF(ContainerVT));
+    else
+      Ops.push_back(PassThru);
+    Ops.push_back(BasePtr);
+    if (IntID == Intrinsic::riscv_vle_mask)
+      Ops.push_back(Mask);
+    Ops.push_back(VL);
+    if (IntID == Intrinsic::riscv_vle_mask)
+      Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
+
+    SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
+
+    Result = DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops,
+                                     MemVT, MMO);
+    Chain = Result.getValue(1);
+    if (ExpandingVL) {
+      MVT IndexVT = ContainerVT;
+      if (ContainerVT.isFloatingPoint())
+        IndexVT = ContainerVT.changeVectorElementTypeToInteger();
+
+      MVT IndexEltVT = IndexVT.getVectorElementType();
+      bool UseVRGATHEREI16 = false;
+      // If index vector is an i8 vector and the element count exceeds 256, we
+      // should change the element type of index vector to i16 to avoid
+      // overflow.
+      if (IndexEltVT == MVT::i8 && VT.getVectorNumElements() > 256) {
+        // FIXME: We need to do vector splitting manually for LMUL=8 cases.
+        if (getLMUL(IndexVT) == RISCVII::LMUL_8)
+          return SDValue();
+        IndexVT = IndexVT.changeVectorElementType(MVT::i16);
+        UseVRGATHEREI16 = true;
+      }
+
+      SDValue Iota =
+          DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
+                      DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
+                      DAG.getUNDEF(IndexVT), Mask, ExpandingVL);
+      Result = DAG.getNode(UseVRGATHEREI16 ? RISCVISD::VRGATHEREI16_VV_VL
+                                           : RISCVISD::VRGATHER_VV_VL,
+                           DL, ContainerVT, Result, Iota, PassThru, Mask,
+                           ExpandingVL);
+    }
+  }
 
   if (VT.isFixedLengthVector())
     Result = convertFromScalableVector(VT, Result, DAG, Subtarget);
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index 26eee6b28d5647..b087b48061c8ec 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1,16 +1,33 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
-; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefixes=CHECK,CHECK-RV32
-; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - | FileCheck %s --check-prefixes=CHECK,CHECK-RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-VRGATHER,CHECK-VRGATHER-RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-INDEXED,CHECK-INDEXED-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-INDEXED,CHECK-INDEXED-RV64
 
 ; Load + expand for i8 type
 
 define <1 x i8> @test_expandload_v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v1i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v1i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v1i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> %mask, <1 x i8> %passthru)
   ret <1 x i8> %res
 }
@@ -26,12 +43,23 @@ define <1 x i8> @test_expandload_v1i8_all_ones(ptr %base, <1 x i8> %passthru) {
 }
 
 define <2 x i8> @test_expandload_v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v2i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v2i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v2i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> %mask, <2 x i8> %passthru)
   ret <2 x i8> %res
 }
@@ -47,12 +75,23 @@ define <2 x i8> @test_expandload_v2i8_all_ones(ptr %base, <2 x i8> %passthru) {
 }
 
 define <4 x i8> @test_expandload_v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v4i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v4i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v4i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> %mask, <4 x i8> %passthru)
   ret <4 x i8> %res
 }
@@ -68,12 +107,23 @@ define <4 x i8> @test_expandload_v4i8_all_ones(ptr %base, <4 x i8> %passthru) {
 }
 
 define <8 x i8> @test_expandload_v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v8i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v8i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v8i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> %mask, <8 x i8> %passthru)
   ret <8 x i8> %res
 }
@@ -89,12 +139,23 @@ define <8 x i8> @test_expandload_v8i8_all_ones(ptr %base, <8 x i8> %passthru) {
 }
 
 define <16 x i8> @test_expandload_v16i8(ptr %base, <16 x i1> %mask, <16 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v16i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v16i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v16i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> %mask, <16 x i8> %passthru)
   ret <16 x i8> %res
 }
@@ -110,13 +171,25 @@ define <16 x i8> @test_expandload_v16i8_all_ones(ptr %base, <16 x i8> %passthru)
 }
 
 define <32 x i8> @test_expandload_v32i8(ptr %base, <32 x i1> %mask, <32 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v32i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 32
-; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-NEXT:    viota.m v10, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v10, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v32i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 32
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v32i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 32
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> %mask, <32 x i8> %passthru)
   ret <32 x i8> %res
 }
@@ -133,13 +206,25 @@ define <32 x i8> @test_expandload_v32i8_all_ones(ptr %base, <32 x i8> %passthru)
 }
 
 define <64 x i8> @test_expandload_v64i8(ptr %base, <64 x i1> %mask, <64 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v64i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 64
-; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; CHECK-NEXT:    viota.m v12, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v12, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v64i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 64
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v64i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 64
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> %mask, <64 x i8> %passthru)
   ret <64 x i8> %res
 }
@@ -156,13 +241,25 @@ define <64 x i8> @test_expandload_v64i8_all_ones(ptr %base, <64 x i8> %passthru)
 }
 
 define <128 x i8> @test_expandload_v128i8(ptr %base, <128 x i1> %mask, <128 x i8> %passthru) {
-; CHECK-LABEL: test_expandload_v128i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 128
-; CHECK-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
-; CHECK-NEXT:    viota.m v16, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v16, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v128i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 128
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v16, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v24, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v128i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 128
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> %mask, <128 x i8> %passthru)
   ret <128 x i8> %res
 }
@@ -179,157 +276,408 @@ define <128 x i8> @test_expandload_v128i8_all_ones(ptr %base, <128 x i8> %passth
 }
 
 define <256 x i8> @test_expandload_v256i8(ptr %base, <256 x i1> %mask, <256 x i8> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v256i8:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    addi sp, sp, -16
-; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV32-NEXT:    csrr a2, vlenb
-; CHECK-RV32-NEXT:    slli a2, a2, 4
-; CHECK-RV32-NEXT:    sub sp, sp, a2
-; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV32-NEXT:    addi a2, sp, 16
-; CHECK-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vmv1r.v v9, v0
-; CHECK-RV32-NEXT:    li a2, 128
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vle8.v v16, (a1)
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v10, v0, 1
-; CHECK-RV32-NEXT:    li a1, 32
-; CHECK-RV32-NEXT:    vsrl.vx v11, v10, a1
-; CHECK-RV32-NEXT:    vmv.x.s a3, v11
-; CHECK-RV32-NEXT:    vsrl.vx v11, v0, a1
-; CHECK-RV32-NEXT:    vmv.x.s a1, v11
-; CHECK-RV32-NEXT:    vmv.x.s a4, v10
-; CHECK-RV32-NEXT:    vmv.x.s a5, v0
-; CHECK-RV32-NEXT:    cpop a1, a1
-; CHECK-RV32-NEXT:    cpop a5, a5
-; CHECK-RV32-NEXT:    add a1, a5, a1
-; CHECK-RV32-NEXT:    cpop a3, a3
-; CHECK-RV32-NEXT:    cpop a4, a4
-; CHECK-RV32-NEXT:    add a3, a4, a3
-; CHECK-RV32-NEXT:    add a1, a1, a3
-; CHECK-RV32-NEXT:    add a1, a0, a1
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-RV32-NEXT:    viota.m v24, v8
-; CHECK-RV32-NEXT:    csrr a2, vlenb
-; CHECK-RV32-NEXT:    slli a2, a2, 3
-; CHECK-RV32-NEXT:    add a2, sp, a2
-; CHECK-RV32-NEXT:    addi a2, a2, 16
-; CHECK-RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vmv1r.v v0, v8
-; CHECK-RV32-NEXT:    csrr a2, vlenb
-; CHECK-RV32-NEXT:    slli a2, a2, 3
-; CHECK-RV32-NEXT:    add a2, sp, a2
-; CHECK-RV32-NEXT:    addi a2, a2, 16
-; CHECK-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; CHECK-RV32-NEXT:    viota.m v24, v9
-; CHECK-RV32-NEXT:    vmv1r.v v0, v9
-; CHECK-RV32-NEXT:    addi a1, sp, 16
-; CHECK-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; CHECK-RV32-NEXT:    csrr a0, vlenb
-; CHECK-RV32-NEXT:    slli a0, a0, 4
-; CHECK-RV32-NEXT:    add sp, sp, a0
-; CHECK-RV32-NEXT:    addi sp, sp, 16
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v256i8:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a2
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
+; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a2, a2, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v7, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a1)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v9, v0, 1
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v10, v9, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v10
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v10, v0, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v10
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v9
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a5, v0
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a6, v0
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a6, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a6, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a6, a6, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a6, sp, a6
+; CHECK-VRGATHER-RV32-NEXT:    addi a6, a6, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a6) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
+; CHECK-VRGATHER-RV32-NEXT:    cpop a5, a5
+; CHECK-VRGATHER-RV32-NEXT:    add a1, a5, a1
+; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
+; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a1, a1, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v7
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v256i8:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    addi sp, sp, -16
-; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV64-NEXT:    csrr a2, vlenb
-; CHECK-RV64-NEXT:    slli a2, a2, 4
-; CHECK-RV64-NEXT:    sub sp, sp, a2
-; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV64-NEXT:    addi a2, sp, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vmv1r.v v9, v0
-; CHECK-RV64-NEXT:    li a2, 128
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vle8.v v16, (a1)
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v10, v0, 1
-; CHECK-RV64-NEXT:    vmv.x.s a1, v10
-; CHECK-RV64-NEXT:    vmv.x.s a3, v0
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-RV64-NEXT:    viota.m v24, v8
-; CHECK-RV64-NEXT:    csrr a2, vlenb
-; CHECK-RV64-NEXT:    slli a2, a2, 3
-; CHECK-RV64-NEXT:    add a2, sp, a2
-; CHECK-RV64-NEXT:    addi a2, a2, 16
-; CHECK-RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    cpop a2, a3
-; CHECK-RV64-NEXT:    cpop a1, a1
-; CHECK-RV64-NEXT:    add a2, a0, a2
-; CHECK-RV64-NEXT:    add a1, a2, a1
-; CHECK-RV64-NEXT:    vmv1r.v v0, v8
-; CHECK-RV64-NEXT:    csrr a2, vlenb
-; CHECK-RV64-NEXT:    slli a2, a2, 3
-; CHECK-RV64-NEXT:    add a2, sp, a2
-; CHECK-RV64-NEXT:    addi a2, a2, 16
-; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; CHECK-RV64-NEXT:    viota.m v24, v9
-; CHECK-RV64-NEXT:    vmv1r.v v0, v9
-; CHECK-RV64-NEXT:    addi a1, sp, 16
-; CHECK-RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; CHECK-RV64-NEXT:    csrr a0, vlenb
-; CHECK-RV64-NEXT:    slli a0, a0, 4
-; CHECK-RV64-NEXT:    add sp, sp, a0
-; CHECK-RV64-NEXT:    addi sp, sp, 16
-; CHECK-RV64-NEXT:    ret
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v256i8:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a2
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
+; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
+; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v7, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a1)
+; CHECK-VRGATHER-RV64-NEXT:    addi a1, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v9, v0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v9
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a4, v0
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle8.v v24, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a4, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a4, a4, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a4, sp, a4
+; CHECK-VRGATHER-RV64-NEXT:    addi a4, a4, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v24, (a4) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a4, v7
+; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV64-NEXT:    cpop a1, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    vmv.v.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v256i8:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a2
+; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v9, v0
+; CHECK-INDEXED-RV32-NEXT:    li a2, 128
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vle8.v v16, (a1)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v10, v0, 1
+; CHECK-INDEXED-RV32-NEXT:    li a1, 32
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v11, v10, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v11
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v11, v0, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v11
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v10
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a5, v0
+; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
+; CHECK-INDEXED-RV32-NEXT:    cpop a5, a5
+; CHECK-INDEXED-RV32-NEXT:    add a1, a5, a1
+; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
+; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
+; CHECK-INDEXED-RV32-NEXT:    add a3, a4, a3
+; CHECK-INDEXED-RV32-NEXT:    add a1, a1, a3
+; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v8
+; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v8
+; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v9
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v9
+; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v256i8:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a2
+; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v9, v0
+; CHECK-INDEXED-RV64-NEXT:    li a2, 128
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle8.v v16, (a1)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v10, v0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v10
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v8
+; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    cpop a2, a3
+; CHECK-INDEXED-RV64-NEXT:    cpop a1, a1
+; CHECK-INDEXED-RV64-NEXT:    add a2, a0, a2
+; CHECK-INDEXED-RV64-NEXT:    add a1, a2, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v8
+; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v9
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v9
+; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> %mask, <256 x i8> %passthru)
   ret <256 x i8> %res
 }
 
 define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    li a1, 128
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vmset.m v8
-; CHECK-RV32-NEXT:    li a2, 32
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-RV32-NEXT:    vmv.x.s a3, v9
-; CHECK-RV32-NEXT:    cpop a3, a3
-; CHECK-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-RV32-NEXT:    cpop a4, a4
-; CHECK-RV32-NEXT:    add a3, a4, a3
-; CHECK-RV32-NEXT:    vslidedown.vi v8, v8, 1
-; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-RV32-NEXT:    vmv.x.s a2, v9
-; CHECK-RV32-NEXT:    cpop a2, a2
-; CHECK-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-RV32-NEXT:    cpop a4, a4
-; CHECK-RV32-NEXT:    add a2, a4, a2
-; CHECK-RV32-NEXT:    add a3, a0, a3
-; CHECK-RV32-NEXT:    add a2, a3, a2
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vle8.v v16, (a2)
-; CHECK-RV32-NEXT:    vle8.v v8, (a0)
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 128
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmset.m v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v9
+; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
+; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v8, v8, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v9
+; CHECK-VRGATHER-RV32-NEXT:    cpop a2, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
+; CHECK-VRGATHER-RV32-NEXT:    add a2, a4, a2
+; CHECK-VRGATHER-RV32-NEXT:    add a3, a0, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a2, a3, a2
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle8.v v16, (a2)
+; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmset.m v16
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV64-NEXT:    cpop a2, a2
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v16, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a2
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle8.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    li a1, 128
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmset.m v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v9
+; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
+; CHECK-INDEXED-RV32-NEXT:    add a3, a4, a3
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v8, v8, 1
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v9
+; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
+; CHECK-INDEXED-RV32-NEXT:    add a2, a4, a2
+; CHECK-INDEXED-RV32-NEXT:    add a3, a0, a3
+; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vle8.v v16, (a2)
+; CHECK-INDEXED-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    li a1, 128
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vle8.v v8, (a0)
-; CHECK-RV64-NEXT:    vmset.m v16
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-RV64-NEXT:    cpop a2, a2
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v16, 1
-; CHECK-RV64-NEXT:    vmv.x.s a3, v16
-; CHECK-RV64-NEXT:    cpop a3, a3
-; CHECK-RV64-NEXT:    add a0, a0, a2
-; CHECK-RV64-NEXT:    add a0, a0, a3
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vle8.v v16, (a0)
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    li a1, 128
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-INDEXED-RV64-NEXT:    vmset.m v16
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v16, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV64-NEXT:    cpop a3, a3
+; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a2
+; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a3
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle8.v v16, (a0)
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> splat (i1 true), <256 x i8> %passthru)
   ret <256 x i8> %res
 }
@@ -337,14 +685,25 @@ define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passth
 ; Load + expand for i16 type
 
 define <1 x i16> @test_expandload_v1i16(ptr %base, <1 x i1> %mask, <1 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v1i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v1i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v1i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %passthru)
   ret <1 x i16> %res
 }
@@ -360,14 +719,25 @@ define <1 x i16> @test_expandload_v1i16_all_ones(ptr %base, <1 x i16> %passthru)
 }
 
 define <2 x i16> @test_expandload_v2i16(ptr %base, <2 x i1> %mask, <2 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v2i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v2i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v2i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %passthru)
   ret <2 x i16> %res
 }
@@ -383,14 +753,25 @@ define <2 x i16> @test_expandload_v2i16_all_ones(ptr %base, <2 x i16> %passthru)
 }
 
 define <4 x i16> @test_expandload_v4i16(ptr %base, <4 x i1> %mask, <4 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v4i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v4i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v4i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %passthru)
   ret <4 x i16> %res
 }
@@ -406,14 +787,25 @@ define <4 x i16> @test_expandload_v4i16_all_ones(ptr %base, <4 x i16> %passthru)
 }
 
 define <8 x i16> @test_expandload_v8i16(ptr %base, <8 x i1> %mask, <8 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v8i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v8i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v8i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %passthru)
   ret <8 x i16> %res
 }
@@ -429,14 +821,25 @@ define <8 x i16> @test_expandload_v8i16_all_ones(ptr %base, <8 x i16> %passthru)
 }
 
 define <16 x i16> @test_expandload_v16i16(ptr %base, <16 x i1> %mask, <16 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v16i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; CHECK-NEXT:    viota.m v10, v0
-; CHECK-NEXT:    vsll.vi v10, v10, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v10, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v16i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e16, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v16i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> %mask, <16 x i16> %passthru)
   ret <16 x i16> %res
 }
@@ -452,15 +855,27 @@ define <16 x i16> @test_expandload_v16i16_all_ones(ptr %base, <16 x i16> %passth
 }
 
 define <32 x i16> @test_expandload_v32i16(ptr %base, <32 x i1> %mask, <32 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v32i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 32
-; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; CHECK-NEXT:    viota.m v12, v0
-; CHECK-NEXT:    vsll.vi v12, v12, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v12, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v32i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 32
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e16, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v32i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 32
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v12, v12, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> %mask, <32 x i16> %passthru)
   ret <32 x i16> %res
 }
@@ -477,15 +892,27 @@ define <32 x i16> @test_expandload_v32i16_all_ones(ptr %base, <32 x i16> %passth
 }
 
 define <64 x i16> @test_expandload_v64i16(ptr %base, <64 x i1> %mask, <64 x i16> %passthru) {
-; CHECK-LABEL: test_expandload_v64i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 64
-; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-NEXT:    viota.m v16, v0
-; CHECK-NEXT:    vsll.vi v16, v16, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v64i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 64
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v16, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v24, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v64i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 64
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v16, v16, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> %mask, <64 x i16> %passthru)
   ret <64 x i16> %res
 }
@@ -502,149 +929,390 @@ define <64 x i16> @test_expandload_v64i16_all_ones(ptr %base, <64 x i16> %passth
 }
 
 define <128 x i16> @test_expandload_v128i16(ptr %base, <128 x i1> %mask, <128 x i16> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v128i16:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    addi sp, sp, -16
-; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 4
-; CHECK-RV32-NEXT:    sub sp, sp, a1
-; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 3
-; CHECK-RV32-NEXT:    add a1, sp, a1
-; CHECK-RV32-NEXT:    addi a1, a1, 16
-; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vmv1r.v v24, v0
-; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 8
-; CHECK-RV32-NEXT:    li a1, 64
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    viota.m v8, v0
-; CHECK-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; CHECK-RV32-NEXT:    addi a2, sp, 16
-; CHECK-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    li a2, 32
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v8, v24, a2
-; CHECK-RV32-NEXT:    vmv.x.s a2, v8
-; CHECK-RV32-NEXT:    cpop a2, a2
-; CHECK-RV32-NEXT:    vmv.x.s a3, v24
-; CHECK-RV32-NEXT:    cpop a3, a3
-; CHECK-RV32-NEXT:    add a2, a3, a2
-; CHECK-RV32-NEXT:    slli a2, a2, 1
-; CHECK-RV32-NEXT:    add a2, a0, a2
-; CHECK-RV32-NEXT:    addi a3, sp, 16
-; CHECK-RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
-; CHECK-RV32-NEXT:    viota.m v8, v24
-; CHECK-RV32-NEXT:    vmv1r.v v0, v24
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; CHECK-RV32-NEXT:    addi a1, sp, 16
-; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 3
-; CHECK-RV32-NEXT:    add a1, sp, a1
-; CHECK-RV32-NEXT:    addi a1, a1, 16
-; CHECK-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    addi a1, sp, 16
-; CHECK-RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; CHECK-RV32-NEXT:    csrr a0, vlenb
-; CHECK-RV32-NEXT:    slli a0, a0, 4
-; CHECK-RV32-NEXT:    add sp, sp, a0
-; CHECK-RV32-NEXT:    addi sp, sp, 16
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v128i16:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 64
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 8
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v7
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v25, v0, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v25
+; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v0
+; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
+; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v8, v0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    vmv.v.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v128i16:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    addi sp, sp, -16
-; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 4
-; CHECK-RV64-NEXT:    sub sp, sp, a1
-; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 3
-; CHECK-RV64-NEXT:    add a1, sp, a1
-; CHECK-RV64-NEXT:    addi a1, a1, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 8
-; CHECK-RV64-NEXT:    li a1, 64
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v16, v0
-; CHECK-RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
-; CHECK-RV64-NEXT:    addi a2, sp, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v7
-; CHECK-RV64-NEXT:    cpop a2, a2
-; CHECK-RV64-NEXT:    slli a2, a2, 1
-; CHECK-RV64-NEXT:    add a2, a0, a2
-; CHECK-RV64-NEXT:    csrr a3, vlenb
-; CHECK-RV64-NEXT:    slli a3, a3, 3
-; CHECK-RV64-NEXT:    add a3, sp, a3
-; CHECK-RV64-NEXT:    addi a3, a3, 16
-; CHECK-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    addi a3, sp, 16
-; CHECK-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
-; CHECK-RV64-NEXT:    viota.m v24, v7
-; CHECK-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; CHECK-RV64-NEXT:    csrr a0, vlenb
-; CHECK-RV64-NEXT:    slli a0, a0, 4
-; CHECK-RV64-NEXT:    add sp, sp, a0
-; CHECK-RV64-NEXT:    addi sp, sp, 16
-; CHECK-RV64-NEXT:    ret
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v128i16:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
+; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 8
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v7
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV64-NEXT:    slli a3, a3, 1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v128i16:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a1
+; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v0
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 8
+; CHECK-INDEXED-RV32-NEXT:    li a1, 64
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v8, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    li a2, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v8, v24, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v8
+; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v24
+; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
+; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    add a2, a0, a2
+; CHECK-INDEXED-RV32-NEXT:    addi a3, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
+; CHECK-INDEXED-RV32-NEXT:    viota.m v8, v24
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v24
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
+; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v128i16:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 64
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
+; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v7
+; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    add a2, a0, a2
+; CHECK-INDEXED-RV64-NEXT:    csrr a3, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a3, a3, 3
+; CHECK-INDEXED-RV64-NEXT:    add a3, sp, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a3, a3, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    addi a3, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> %mask, <128 x i16> %passthru)
   ret <128 x i16> %res
 }
 
 define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    li a1, 64
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    vle16.v v8, (a0)
-; CHECK-RV32-NEXT:    vmset.m v16
-; CHECK-RV32-NEXT:    li a2, 32
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v17, v16, a2
-; CHECK-RV32-NEXT:    vmv.x.s a2, v17
-; CHECK-RV32-NEXT:    cpop a2, a2
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    cpop a3, a3
-; CHECK-RV32-NEXT:    add a2, a3, a2
-; CHECK-RV32-NEXT:    slli a2, a2, 1
-; CHECK-RV32-NEXT:    add a0, a0, a2
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV32-NEXT:    vle16.v v16, (a0)
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 64
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmset.m v16
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v17, v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v17
+; CHECK-VRGATHER-RV32-NEXT:    cpop a2, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a2, a3, a2
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a2
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle16.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle16.v v8, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmset.m v16
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV64-NEXT:    cpop a2, a2
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a2
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    li a1, 64
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-INDEXED-RV32-NEXT:    vmset.m v16
+; CHECK-INDEXED-RV32-NEXT:    li a2, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v17, v16, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v17
+; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
+; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    add a0, a0, a2
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vle16.v v16, (a0)
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    li a1, 64
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    vle16.v v8, (a0)
-; CHECK-RV64-NEXT:    vmset.m v16
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-RV64-NEXT:    cpop a2, a2
-; CHECK-RV64-NEXT:    slli a2, a2, 1
-; CHECK-RV64-NEXT:    add a0, a0, a2
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-RV64-NEXT:    vle16.v v16, (a0)
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    li a1, 64
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle16.v v8, (a0)
+; CHECK-INDEXED-RV64-NEXT:    vmset.m v16
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a2
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> splat (i1 true), <128 x i16> %passthru)
   ret <128 x i16> %res
 }
@@ -652,14 +1320,25 @@ define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %pas
 ; Load + expand for i32 type
 
 define <1 x i32> @test_expandload_v1i32(ptr %base, <1 x i1> %mask, <1 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v1i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v1i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v1i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %passthru)
   ret <1 x i32> %res
 }
@@ -675,14 +1354,25 @@ define <1 x i32> @test_expandload_v1i32_all_ones(ptr %base, <1 x i32> %passthru)
 }
 
 define <2 x i32> @test_expandload_v2i32(ptr %base, <2 x i1> %mask, <2 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v2i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v2i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v2i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %passthru)
   ret <2 x i32> %res
 }
@@ -698,14 +1388,25 @@ define <2 x i32> @test_expandload_v2i32_all_ones(ptr %base, <2 x i32> %passthru)
 }
 
 define <4 x i32> @test_expandload_v4i32(ptr %base, <4 x i1> %mask, <4 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v4i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v4i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v4i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %passthru)
   ret <4 x i32> %res
 }
@@ -721,14 +1422,25 @@ define <4 x i32> @test_expandload_v4i32_all_ones(ptr %base, <4 x i32> %passthru)
 }
 
 define <8 x i32> @test_expandload_v8i32(ptr %base, <8 x i1> %mask, <8 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v8i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-NEXT:    viota.m v10, v0
-; CHECK-NEXT:    vsll.vi v10, v10, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v8i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v8i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %passthru)
   ret <8 x i32> %res
 }
@@ -744,14 +1456,25 @@ define <8 x i32> @test_expandload_v8i32_all_ones(ptr %base, <8 x i32> %passthru)
 }
 
 define <16 x i32> @test_expandload_v16i32(ptr %base, <16 x i1> %mask, <16 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v16i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-NEXT:    viota.m v12, v0
-; CHECK-NEXT:    vsll.vi v12, v12, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v16i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e32, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v16i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v12, v12, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> %mask, <16 x i32> %passthru)
   ret <16 x i32> %res
 }
@@ -767,15 +1490,27 @@ define <16 x i32> @test_expandload_v16i32_all_ones(ptr %base, <16 x i32> %passth
 }
 
 define <32 x i32> @test_expandload_v32i32(ptr %base, <32 x i1> %mask, <32 x i32> %passthru) {
-; CHECK-LABEL: test_expandload_v32i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 32
-; CHECK-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-NEXT:    viota.m v16, v0
-; CHECK-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v32i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 32
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v24, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v32i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 32
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> %mask, <32 x i32> %passthru)
   ret <32 x i32> %res
 }
@@ -792,125 +1527,337 @@ define <32 x i32> @test_expandload_v32i32_all_ones(ptr %base, <32 x i32> %passth
 }
 
 define <64 x i32> @test_expandload_v64i32(ptr %base, <64 x i1> %mask, <64 x i32> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v64i32:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    addi sp, sp, -16
-; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 4
-; CHECK-RV32-NEXT:    sub sp, sp, a1
-; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV32-NEXT:    csrr a1, vlenb
-; CHECK-RV32-NEXT:    slli a1, a1, 3
-; CHECK-RV32-NEXT:    add a1, sp, a1
-; CHECK-RV32-NEXT:    addi a1, a1, 16
-; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vmv1r.v v7, v0
-; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 4
-; CHECK-RV32-NEXT:    li a1, 32
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-RV32-NEXT:    viota.m v16, v0
-; CHECK-RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-RV32-NEXT:    addi a1, sp, 16
-; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV32-NEXT:    vmv.x.s a1, v7
-; CHECK-RV32-NEXT:    cpop a1, a1
-; CHECK-RV32-NEXT:    slli a1, a1, 2
-; CHECK-RV32-NEXT:    add a1, a0, a1
-; CHECK-RV32-NEXT:    csrr a2, vlenb
-; CHECK-RV32-NEXT:    slli a2, a2, 3
-; CHECK-RV32-NEXT:    add a2, sp, a2
-; CHECK-RV32-NEXT:    addi a2, a2, 16
-; CHECK-RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    addi a2, sp, 16
-; CHECK-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; CHECK-RV32-NEXT:    viota.m v24, v7
-; CHECK-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; CHECK-RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; CHECK-RV32-NEXT:    csrr a0, vlenb
-; CHECK-RV32-NEXT:    slli a0, a0, 4
-; CHECK-RV32-NEXT:    add sp, sp, a0
-; CHECK-RV32-NEXT:    addi sp, sp, 16
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v64i32:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a2, a2, a3
+; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 4
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v7
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v64i32:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
+; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
+; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 4
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v7
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-VRGATHER-RV64-NEXT:    cpopw a3, a3
+; CHECK-VRGATHER-RV64-NEXT:    slli a3, a3, 2
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v64i32:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    addi sp, sp, -16
-; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 4
-; CHECK-RV64-NEXT:    sub sp, sp, a1
-; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 3
-; CHECK-RV64-NEXT:    add a1, sp, a1
-; CHECK-RV64-NEXT:    addi a1, a1, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 4
-; CHECK-RV64-NEXT:    li a1, 32
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v16, v0
-; CHECK-RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-RV64-NEXT:    addi a1, sp, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vmv.x.s a1, v7
-; CHECK-RV64-NEXT:    cpopw a1, a1
-; CHECK-RV64-NEXT:    slli a1, a1, 2
-; CHECK-RV64-NEXT:    add a1, a0, a1
-; CHECK-RV64-NEXT:    csrr a2, vlenb
-; CHECK-RV64-NEXT:    slli a2, a2, 3
-; CHECK-RV64-NEXT:    add a2, sp, a2
-; CHECK-RV64-NEXT:    addi a2, a2, 16
-; CHECK-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    addi a2, sp, 16
-; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; CHECK-RV64-NEXT:    viota.m v24, v7
-; CHECK-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; CHECK-RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; CHECK-RV64-NEXT:    csrr a0, vlenb
-; CHECK-RV64-NEXT:    slli a0, a0, 4
-; CHECK-RV64-NEXT:    add sp, sp, a0
-; CHECK-RV64-NEXT:    addi sp, sp, 16
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v64i32:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a1
+; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v7, v0
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 4
+; CHECK-INDEXED-RV32-NEXT:    li a1, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v7
+; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
+; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v7
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v64i32:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 4
+; CHECK-INDEXED-RV64-NEXT:    li a1, 32
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
+; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v7
+; CHECK-INDEXED-RV64-NEXT:    cpopw a1, a1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    add a1, a0, a1
+; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> %mask, <64 x i32> %passthru)
   ret <64 x i32> %res
 }
 
 define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    li a1, 32
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-RV32-NEXT:    vle32.v v8, (a0)
-; CHECK-RV32-NEXT:    vmset.m v16
-; CHECK-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-RV32-NEXT:    cpop a1, a1
-; CHECK-RV32-NEXT:    slli a1, a1, 2
-; CHECK-RV32-NEXT:    add a0, a0, a1
-; CHECK-RV32-NEXT:    vle32.v v16, (a0)
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle32.v v8, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmset.m v16
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 2
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle32.v v8, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 128
+; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    li a1, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vle32.v v8, (a0)
+; CHECK-INDEXED-RV32-NEXT:    vmset.m v16
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    add a0, a0, a1
+; CHECK-INDEXED-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    li a1, 32
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-RV64-NEXT:    vle32.v v8, (a0)
-; CHECK-RV64-NEXT:    addi a0, a0, 128
-; CHECK-RV64-NEXT:    vle32.v v16, (a0)
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    li a1, 32
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vle32.v v8, (a0)
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 128
+; CHECK-INDEXED-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> splat (i1 true), <64 x i32> %passthru)
   ret <64 x i32> %res
 }
@@ -918,23 +1865,34 @@ define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passth
 ; Load + expand for i64 type
 
 define <1 x i64> @test_expandload_v1i64(ptr %base, <1 x i1> %mask, <1 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v1i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-RV32-NEXT:    viota.m v9, v0
-; CHECK-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v1i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v1i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    viota.m v9, v0
-; CHECK-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v1i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v1i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %passthru)
   ret <1 x i64> %res
 }
@@ -950,23 +1908,34 @@ define <1 x i64> @test_expandload_v1i64_all_ones(ptr %base, <1 x i64> %passthru)
 }
 
 define <2 x i64> @test_expandload_v2i64(ptr %base, <2 x i1> %mask, <2 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v2i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-RV32-NEXT:    viota.m v9, v0
-; CHECK-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v2i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v2i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v2i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    viota.m v9, v0
-; CHECK-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v2i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %passthru)
   ret <2 x i64> %res
 }
@@ -982,23 +1951,34 @@ define <2 x i64> @test_expandload_v2i64_all_ones(ptr %base, <2 x i64> %passthru)
 }
 
 define <4 x i64> @test_expandload_v4i64(ptr %base, <4 x i1> %mask, <4 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v4i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-RV32-NEXT:    viota.m v10, v0
-; CHECK-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v4i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v4i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; CHECK-RV64-NEXT:    viota.m v10, v0
-; CHECK-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v4i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v4i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %passthru)
   ret <4 x i64> %res
 }
@@ -1014,23 +1994,34 @@ define <4 x i64> @test_expandload_v4i64_all_ones(ptr %base, <4 x i64> %passthru)
 }
 
 define <8 x i64> @test_expandload_v8i64(ptr %base, <8 x i1> %mask, <8 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v8i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-RV32-NEXT:    viota.m v12, v0
-; CHECK-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v8i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v8i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v8i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; CHECK-RV64-NEXT:    viota.m v12, v0
-; CHECK-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v8i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %passthru)
   ret <8 x i64> %res
 }
@@ -1046,23 +2037,34 @@ define <8 x i64> @test_expandload_v8i64_all_ones(ptr %base, <8 x i64> %passthru)
 }
 
 define <16 x i64> @test_expandload_v16i64(ptr %base, <16 x i1> %mask, <16 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v16i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-RV32-NEXT:    viota.m v16, v0
-; CHECK-RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v16i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v16, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v24, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v16i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v16, v0
-; CHECK-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v16i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v16i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> %mask, <16 x i64> %passthru)
   ret <16 x i64> %res
 }
@@ -1078,77 +2080,267 @@ define <16 x i64> @test_expandload_v16i64_all_ones(ptr %base, <16 x i64> %passth
 }
 
 define <32 x i64> @test_expandload_v32i64(ptr %base, <32 x i1> %mask, <32 x i64> %passthru) {
-; CHECK-RV32-LABEL: test_expandload_v32i64:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vmv1r.v v24, v0
-; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v0, v0, 2
-; CHECK-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-RV32-NEXT:    viota.m v28, v0
-; CHECK-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a1, v24
-; CHECK-RV32-NEXT:    zext.h a1, a1
-; CHECK-RV32-NEXT:    cpop a1, a1
-; CHECK-RV32-NEXT:    slli a1, a1, 3
-; CHECK-RV32-NEXT:    add a1, a0, a1
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; CHECK-RV32-NEXT:    viota.m v28, v24
-; CHECK-RV32-NEXT:    vmv1r.v v0, v24
-; CHECK-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v32i64:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle64.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v0
+; CHECK-VRGATHER-RV32-NEXT:    zext.h a1, a1
+; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 2
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v7
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vle64.v v16, (a0)
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v32i64:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle64.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
+; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
+; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v0
+; CHECK-VRGATHER-RV64-NEXT:    zext.h a1, a1
+; CHECK-VRGATHER-RV64-NEXT:    cpopw a1, a1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 2
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a1, v7
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vle64.v v16, (a0)
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
+; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
+; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
+; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
+; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v32i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v0
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 2
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v28, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v24
+; CHECK-INDEXED-RV32-NEXT:    zext.h a1, a1
+; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v28, v24
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v24
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v32i64:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    addi sp, sp, -16
-; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 4
-; CHECK-RV64-NEXT:    sub sp, sp, a1
-; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-RV64-NEXT:    csrr a1, vlenb
-; CHECK-RV64-NEXT:    slli a1, a1, 3
-; CHECK-RV64-NEXT:    add a1, sp, a1
-; CHECK-RV64-NEXT:    addi a1, a1, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v0, v0, 2
-; CHECK-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; CHECK-RV64-NEXT:    viota.m v16, v0
-; CHECK-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-RV64-NEXT:    addi a1, sp, 16
-; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a1, v7
-; CHECK-RV64-NEXT:    zext.h a1, a1
-; CHECK-RV64-NEXT:    cpopw a1, a1
-; CHECK-RV64-NEXT:    slli a1, a1, 3
-; CHECK-RV64-NEXT:    add a1, a0, a1
-; CHECK-RV64-NEXT:    csrr a2, vlenb
-; CHECK-RV64-NEXT:    slli a2, a2, 3
-; CHECK-RV64-NEXT:    add a2, sp, a2
-; CHECK-RV64-NEXT:    addi a2, a2, 16
-; CHECK-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    addi a2, sp, 16
-; CHECK-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
-; CHECK-RV64-NEXT:    viota.m v24, v7
-; CHECK-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; CHECK-RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
-; CHECK-RV64-NEXT:    csrr a0, vlenb
-; CHECK-RV64-NEXT:    slli a0, a0, 4
-; CHECK-RV64-NEXT:    add sp, sp, a0
-; CHECK-RV64-NEXT:    addi sp, sp, 16
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v32i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
+; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
+; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
+; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 2
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v7
+; CHECK-INDEXED-RV64-NEXT:    zext.h a1, a1
+; CHECK-INDEXED-RV64-NEXT:    cpopw a1, a1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    add a1, a0, a1
+; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
+; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
+; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
+; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
+; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
+; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> %mask, <32 x i64> %passthru)
   ret <32 x i64> %res
 }
@@ -1168,18556 +2360,37106 @@ define <32 x i64> @test_expandload_v32i64_all_ones(ptr %base, <32 x i64> %passth
 ; Tests that will exceed the range of i8 index.
 
 define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(16, 1024) {
-; CHECK-LABEL: test_expandload_v512i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    li a1, 512
-; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-NEXT:    viota.m v16, v0
-; CHECK-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: test_expandload_v512i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    li a1, 512
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    vrgatherei16.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: test_expandload_v512i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    li a1, 512
+; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v16, v0
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v16, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
 
 ; FIXME: We can split it in lowering.
 define <512 x i8> @test_expandload_v512i8_vlen512(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(8, 1024) {
-; CHECK-RV32-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-RV32:       # %bb.0:
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v0
-; CHECK-RV32-NEXT:    andi a1, a3, 1
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_1
-; CHECK-RV32-NEXT:    j .LBB61_544
-; CHECK-RV32-NEXT:  .LBB61_1: # %else
-; CHECK-RV32-NEXT:    andi a1, a3, 2
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_2
-; CHECK-RV32-NEXT:    j .LBB61_545
-; CHECK-RV32-NEXT:  .LBB61_2: # %else2
-; CHECK-RV32-NEXT:    andi a1, a3, 4
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_3
-; CHECK-RV32-NEXT:    j .LBB61_546
-; CHECK-RV32-NEXT:  .LBB61_3: # %else6
-; CHECK-RV32-NEXT:    andi a1, a3, 8
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_4
-; CHECK-RV32-NEXT:    j .LBB61_547
-; CHECK-RV32-NEXT:  .LBB61_4: # %else10
-; CHECK-RV32-NEXT:    andi a1, a3, 16
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_5
-; CHECK-RV32-NEXT:    j .LBB61_548
-; CHECK-RV32-NEXT:  .LBB61_5: # %else14
-; CHECK-RV32-NEXT:    andi a1, a3, 32
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_6
-; CHECK-RV32-NEXT:    j .LBB61_549
-; CHECK-RV32-NEXT:  .LBB61_6: # %else18
-; CHECK-RV32-NEXT:    andi a1, a3, 64
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_7
-; CHECK-RV32-NEXT:    j .LBB61_550
-; CHECK-RV32-NEXT:  .LBB61_7: # %else22
-; CHECK-RV32-NEXT:    andi a1, a3, 128
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_8
-; CHECK-RV32-NEXT:    j .LBB61_551
-; CHECK-RV32-NEXT:  .LBB61_8: # %else26
-; CHECK-RV32-NEXT:    andi a1, a3, 256
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_9
-; CHECK-RV32-NEXT:    j .LBB61_552
-; CHECK-RV32-NEXT:  .LBB61_9: # %else30
-; CHECK-RV32-NEXT:    andi a1, a3, 512
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_10
-; CHECK-RV32-NEXT:    j .LBB61_553
-; CHECK-RV32-NEXT:  .LBB61_10: # %else34
-; CHECK-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-RV32-NEXT:    beqz a1, .LBB61_11
-; CHECK-RV32-NEXT:    j .LBB61_554
-; CHECK-RV32-NEXT:  .LBB61_11: # %else38
-; CHECK-RV32-NEXT:    slli a1, a3, 20
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_12
-; CHECK-RV32-NEXT:    j .LBB61_555
-; CHECK-RV32-NEXT:  .LBB61_12: # %else42
-; CHECK-RV32-NEXT:    slli a1, a3, 19
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_13
-; CHECK-RV32-NEXT:    j .LBB61_556
-; CHECK-RV32-NEXT:  .LBB61_13: # %else46
-; CHECK-RV32-NEXT:    slli a1, a3, 18
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_14
-; CHECK-RV32-NEXT:    j .LBB61_557
-; CHECK-RV32-NEXT:  .LBB61_14: # %else50
-; CHECK-RV32-NEXT:    slli a1, a3, 17
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_15
-; CHECK-RV32-NEXT:    j .LBB61_558
-; CHECK-RV32-NEXT:  .LBB61_15: # %else54
-; CHECK-RV32-NEXT:    slli a1, a3, 16
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_16
-; CHECK-RV32-NEXT:    j .LBB61_559
-; CHECK-RV32-NEXT:  .LBB61_16: # %else58
-; CHECK-RV32-NEXT:    slli a1, a3, 15
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_17
-; CHECK-RV32-NEXT:    j .LBB61_560
-; CHECK-RV32-NEXT:  .LBB61_17: # %else62
-; CHECK-RV32-NEXT:    slli a1, a3, 14
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_18
-; CHECK-RV32-NEXT:    j .LBB61_561
-; CHECK-RV32-NEXT:  .LBB61_18: # %else66
-; CHECK-RV32-NEXT:    slli a1, a3, 13
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_19
-; CHECK-RV32-NEXT:    j .LBB61_562
-; CHECK-RV32-NEXT:  .LBB61_19: # %else70
-; CHECK-RV32-NEXT:    slli a1, a3, 12
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_20
-; CHECK-RV32-NEXT:    j .LBB61_563
-; CHECK-RV32-NEXT:  .LBB61_20: # %else74
-; CHECK-RV32-NEXT:    slli a1, a3, 11
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_21
-; CHECK-RV32-NEXT:    j .LBB61_564
-; CHECK-RV32-NEXT:  .LBB61_21: # %else78
-; CHECK-RV32-NEXT:    slli a1, a3, 10
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_22
-; CHECK-RV32-NEXT:    j .LBB61_565
-; CHECK-RV32-NEXT:  .LBB61_22: # %else82
-; CHECK-RV32-NEXT:    slli a1, a3, 9
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_23
-; CHECK-RV32-NEXT:    j .LBB61_566
-; CHECK-RV32-NEXT:  .LBB61_23: # %else86
-; CHECK-RV32-NEXT:    slli a1, a3, 8
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_24
-; CHECK-RV32-NEXT:    j .LBB61_567
-; CHECK-RV32-NEXT:  .LBB61_24: # %else90
-; CHECK-RV32-NEXT:    slli a1, a3, 7
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_25
-; CHECK-RV32-NEXT:    j .LBB61_568
-; CHECK-RV32-NEXT:  .LBB61_25: # %else94
-; CHECK-RV32-NEXT:    slli a1, a3, 6
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_26
-; CHECK-RV32-NEXT:    j .LBB61_569
-; CHECK-RV32-NEXT:  .LBB61_26: # %else98
-; CHECK-RV32-NEXT:    slli a1, a3, 5
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_27
-; CHECK-RV32-NEXT:    j .LBB61_570
-; CHECK-RV32-NEXT:  .LBB61_27: # %else102
-; CHECK-RV32-NEXT:    slli a1, a3, 4
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_28
-; CHECK-RV32-NEXT:    j .LBB61_571
-; CHECK-RV32-NEXT:  .LBB61_28: # %else106
-; CHECK-RV32-NEXT:    slli a1, a3, 3
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_30
-; CHECK-RV32-NEXT:  .LBB61_29: # %cond.load109
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_30: # %else110
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    li a1, 32
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_32
-; CHECK-RV32-NEXT:  # %bb.31: # %cond.load113
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_32: # %else114
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v0, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_34
-; CHECK-RV32-NEXT:  # %bb.33: # %cond.load117
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vi v8, v17, 30
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_34: # %else118
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_35
-; CHECK-RV32-NEXT:    j .LBB61_572
-; CHECK-RV32-NEXT:  .LBB61_35: # %else122
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_36
-; CHECK-RV32-NEXT:    j .LBB61_573
-; CHECK-RV32-NEXT:  .LBB61_36: # %else126
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_37
-; CHECK-RV32-NEXT:    j .LBB61_574
-; CHECK-RV32-NEXT:  .LBB61_37: # %else130
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_38
-; CHECK-RV32-NEXT:    j .LBB61_575
-; CHECK-RV32-NEXT:  .LBB61_38: # %else134
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_39
-; CHECK-RV32-NEXT:    j .LBB61_576
-; CHECK-RV32-NEXT:  .LBB61_39: # %else138
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_40
-; CHECK-RV32-NEXT:    j .LBB61_577
-; CHECK-RV32-NEXT:  .LBB61_40: # %else142
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_41
-; CHECK-RV32-NEXT:    j .LBB61_578
-; CHECK-RV32-NEXT:  .LBB61_41: # %else146
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_42
-; CHECK-RV32-NEXT:    j .LBB61_579
-; CHECK-RV32-NEXT:  .LBB61_42: # %else150
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_43
-; CHECK-RV32-NEXT:    j .LBB61_580
-; CHECK-RV32-NEXT:  .LBB61_43: # %else154
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_44
-; CHECK-RV32-NEXT:    j .LBB61_581
-; CHECK-RV32-NEXT:  .LBB61_44: # %else158
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_45
-; CHECK-RV32-NEXT:    j .LBB61_582
-; CHECK-RV32-NEXT:  .LBB61_45: # %else162
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_46
-; CHECK-RV32-NEXT:    j .LBB61_583
-; CHECK-RV32-NEXT:  .LBB61_46: # %else166
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_47
-; CHECK-RV32-NEXT:    j .LBB61_584
-; CHECK-RV32-NEXT:  .LBB61_47: # %else170
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_48
-; CHECK-RV32-NEXT:    j .LBB61_585
-; CHECK-RV32-NEXT:  .LBB61_48: # %else174
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_49
-; CHECK-RV32-NEXT:    j .LBB61_586
-; CHECK-RV32-NEXT:  .LBB61_49: # %else178
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_50
-; CHECK-RV32-NEXT:    j .LBB61_587
-; CHECK-RV32-NEXT:  .LBB61_50: # %else182
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_51
-; CHECK-RV32-NEXT:    j .LBB61_588
-; CHECK-RV32-NEXT:  .LBB61_51: # %else186
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_52
-; CHECK-RV32-NEXT:    j .LBB61_589
-; CHECK-RV32-NEXT:  .LBB61_52: # %else190
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_53
-; CHECK-RV32-NEXT:    j .LBB61_590
-; CHECK-RV32-NEXT:  .LBB61_53: # %else194
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_54
-; CHECK-RV32-NEXT:    j .LBB61_591
-; CHECK-RV32-NEXT:  .LBB61_54: # %else198
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_55
-; CHECK-RV32-NEXT:    j .LBB61_592
-; CHECK-RV32-NEXT:  .LBB61_55: # %else202
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_56
-; CHECK-RV32-NEXT:    j .LBB61_593
-; CHECK-RV32-NEXT:  .LBB61_56: # %else206
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_57
-; CHECK-RV32-NEXT:    j .LBB61_594
-; CHECK-RV32-NEXT:  .LBB61_57: # %else210
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_58
-; CHECK-RV32-NEXT:    j .LBB61_595
-; CHECK-RV32-NEXT:  .LBB61_58: # %else214
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_59
-; CHECK-RV32-NEXT:    j .LBB61_596
-; CHECK-RV32-NEXT:  .LBB61_59: # %else218
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_60
-; CHECK-RV32-NEXT:    j .LBB61_597
-; CHECK-RV32-NEXT:  .LBB61_60: # %else222
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_61
-; CHECK-RV32-NEXT:    j .LBB61_598
-; CHECK-RV32-NEXT:  .LBB61_61: # %else226
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_62
-; CHECK-RV32-NEXT:    j .LBB61_599
-; CHECK-RV32-NEXT:  .LBB61_62: # %else230
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_63
-; CHECK-RV32-NEXT:    j .LBB61_600
-; CHECK-RV32-NEXT:  .LBB61_63: # %else234
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_64
-; CHECK-RV32-NEXT:    j .LBB61_601
-; CHECK-RV32-NEXT:  .LBB61_64: # %else238
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_66
-; CHECK-RV32-NEXT:  .LBB61_65: # %cond.load241
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 62
-; CHECK-RV32-NEXT:    li a4, 61
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:  .LBB61_66: # %else242
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_68
-; CHECK-RV32-NEXT:  # %bb.67: # %cond.load245
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v17, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 63
-; CHECK-RV32-NEXT:    li a4, 62
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_68: # %else246
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_69
-; CHECK-RV32-NEXT:    j .LBB61_602
-; CHECK-RV32-NEXT:  .LBB61_69: # %else250
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_70
-; CHECK-RV32-NEXT:    j .LBB61_603
-; CHECK-RV32-NEXT:  .LBB61_70: # %else254
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_71
-; CHECK-RV32-NEXT:    j .LBB61_604
-; CHECK-RV32-NEXT:  .LBB61_71: # %else258
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_72
-; CHECK-RV32-NEXT:    j .LBB61_605
-; CHECK-RV32-NEXT:  .LBB61_72: # %else262
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_73
-; CHECK-RV32-NEXT:    j .LBB61_606
-; CHECK-RV32-NEXT:  .LBB61_73: # %else266
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_74
-; CHECK-RV32-NEXT:    j .LBB61_607
-; CHECK-RV32-NEXT:  .LBB61_74: # %else270
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_75
-; CHECK-RV32-NEXT:    j .LBB61_608
-; CHECK-RV32-NEXT:  .LBB61_75: # %else274
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_76
-; CHECK-RV32-NEXT:    j .LBB61_609
-; CHECK-RV32-NEXT:  .LBB61_76: # %else278
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_77
-; CHECK-RV32-NEXT:    j .LBB61_610
-; CHECK-RV32-NEXT:  .LBB61_77: # %else282
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_78
-; CHECK-RV32-NEXT:    j .LBB61_611
-; CHECK-RV32-NEXT:  .LBB61_78: # %else286
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_79
-; CHECK-RV32-NEXT:    j .LBB61_612
-; CHECK-RV32-NEXT:  .LBB61_79: # %else290
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_80
-; CHECK-RV32-NEXT:    j .LBB61_613
-; CHECK-RV32-NEXT:  .LBB61_80: # %else294
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_81
-; CHECK-RV32-NEXT:    j .LBB61_614
-; CHECK-RV32-NEXT:  .LBB61_81: # %else298
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_82
-; CHECK-RV32-NEXT:    j .LBB61_615
-; CHECK-RV32-NEXT:  .LBB61_82: # %else302
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_83
-; CHECK-RV32-NEXT:    j .LBB61_616
-; CHECK-RV32-NEXT:  .LBB61_83: # %else306
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_84
-; CHECK-RV32-NEXT:    j .LBB61_617
-; CHECK-RV32-NEXT:  .LBB61_84: # %else310
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_85
-; CHECK-RV32-NEXT:    j .LBB61_618
-; CHECK-RV32-NEXT:  .LBB61_85: # %else314
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_86
-; CHECK-RV32-NEXT:    j .LBB61_619
-; CHECK-RV32-NEXT:  .LBB61_86: # %else318
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_87
-; CHECK-RV32-NEXT:    j .LBB61_620
-; CHECK-RV32-NEXT:  .LBB61_87: # %else322
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_88
-; CHECK-RV32-NEXT:    j .LBB61_621
-; CHECK-RV32-NEXT:  .LBB61_88: # %else326
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_89
-; CHECK-RV32-NEXT:    j .LBB61_622
-; CHECK-RV32-NEXT:  .LBB61_89: # %else330
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_90
-; CHECK-RV32-NEXT:    j .LBB61_623
-; CHECK-RV32-NEXT:  .LBB61_90: # %else334
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_91
-; CHECK-RV32-NEXT:    j .LBB61_624
-; CHECK-RV32-NEXT:  .LBB61_91: # %else338
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_92
-; CHECK-RV32-NEXT:    j .LBB61_625
-; CHECK-RV32-NEXT:  .LBB61_92: # %else342
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_93
-; CHECK-RV32-NEXT:    j .LBB61_626
-; CHECK-RV32-NEXT:  .LBB61_93: # %else346
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_94
-; CHECK-RV32-NEXT:    j .LBB61_627
-; CHECK-RV32-NEXT:  .LBB61_94: # %else350
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_95
-; CHECK-RV32-NEXT:    j .LBB61_628
-; CHECK-RV32-NEXT:  .LBB61_95: # %else354
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_96
-; CHECK-RV32-NEXT:    j .LBB61_629
-; CHECK-RV32-NEXT:  .LBB61_96: # %else358
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_97
-; CHECK-RV32-NEXT:    j .LBB61_630
-; CHECK-RV32-NEXT:  .LBB61_97: # %else362
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_98
-; CHECK-RV32-NEXT:    j .LBB61_631
-; CHECK-RV32-NEXT:  .LBB61_98: # %else366
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_100
-; CHECK-RV32-NEXT:  .LBB61_99: # %cond.load369
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 94
-; CHECK-RV32-NEXT:    li a4, 93
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_100: # %else370
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_102
-; CHECK-RV32-NEXT:  # %bb.101: # %cond.load373
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 95
-; CHECK-RV32-NEXT:    li a4, 94
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_102: # %else374
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_103
-; CHECK-RV32-NEXT:    j .LBB61_632
-; CHECK-RV32-NEXT:  .LBB61_103: # %else378
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_104
-; CHECK-RV32-NEXT:    j .LBB61_633
-; CHECK-RV32-NEXT:  .LBB61_104: # %else382
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_105
-; CHECK-RV32-NEXT:    j .LBB61_634
-; CHECK-RV32-NEXT:  .LBB61_105: # %else386
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_106
-; CHECK-RV32-NEXT:    j .LBB61_635
-; CHECK-RV32-NEXT:  .LBB61_106: # %else390
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_107
-; CHECK-RV32-NEXT:    j .LBB61_636
-; CHECK-RV32-NEXT:  .LBB61_107: # %else394
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_108
-; CHECK-RV32-NEXT:    j .LBB61_637
-; CHECK-RV32-NEXT:  .LBB61_108: # %else398
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_109
-; CHECK-RV32-NEXT:    j .LBB61_638
-; CHECK-RV32-NEXT:  .LBB61_109: # %else402
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_110
-; CHECK-RV32-NEXT:    j .LBB61_639
-; CHECK-RV32-NEXT:  .LBB61_110: # %else406
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_111
-; CHECK-RV32-NEXT:    j .LBB61_640
-; CHECK-RV32-NEXT:  .LBB61_111: # %else410
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_112
-; CHECK-RV32-NEXT:    j .LBB61_641
-; CHECK-RV32-NEXT:  .LBB61_112: # %else414
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_113
-; CHECK-RV32-NEXT:    j .LBB61_642
-; CHECK-RV32-NEXT:  .LBB61_113: # %else418
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_114
-; CHECK-RV32-NEXT:    j .LBB61_643
-; CHECK-RV32-NEXT:  .LBB61_114: # %else422
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_115
-; CHECK-RV32-NEXT:    j .LBB61_644
-; CHECK-RV32-NEXT:  .LBB61_115: # %else426
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_116
-; CHECK-RV32-NEXT:    j .LBB61_645
-; CHECK-RV32-NEXT:  .LBB61_116: # %else430
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_117
-; CHECK-RV32-NEXT:    j .LBB61_646
-; CHECK-RV32-NEXT:  .LBB61_117: # %else434
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_118
-; CHECK-RV32-NEXT:    j .LBB61_647
-; CHECK-RV32-NEXT:  .LBB61_118: # %else438
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_119
-; CHECK-RV32-NEXT:    j .LBB61_648
-; CHECK-RV32-NEXT:  .LBB61_119: # %else442
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_120
-; CHECK-RV32-NEXT:    j .LBB61_649
-; CHECK-RV32-NEXT:  .LBB61_120: # %else446
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_121
-; CHECK-RV32-NEXT:    j .LBB61_650
-; CHECK-RV32-NEXT:  .LBB61_121: # %else450
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_122
-; CHECK-RV32-NEXT:    j .LBB61_651
-; CHECK-RV32-NEXT:  .LBB61_122: # %else454
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_123
-; CHECK-RV32-NEXT:    j .LBB61_652
-; CHECK-RV32-NEXT:  .LBB61_123: # %else458
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_124
-; CHECK-RV32-NEXT:    j .LBB61_653
-; CHECK-RV32-NEXT:  .LBB61_124: # %else462
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_125
-; CHECK-RV32-NEXT:    j .LBB61_654
-; CHECK-RV32-NEXT:  .LBB61_125: # %else466
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_126
-; CHECK-RV32-NEXT:    j .LBB61_655
-; CHECK-RV32-NEXT:  .LBB61_126: # %else470
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_127
-; CHECK-RV32-NEXT:    j .LBB61_656
-; CHECK-RV32-NEXT:  .LBB61_127: # %else474
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_128
-; CHECK-RV32-NEXT:    j .LBB61_657
-; CHECK-RV32-NEXT:  .LBB61_128: # %else478
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_129
-; CHECK-RV32-NEXT:    j .LBB61_658
-; CHECK-RV32-NEXT:  .LBB61_129: # %else482
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_130
-; CHECK-RV32-NEXT:    j .LBB61_659
-; CHECK-RV32-NEXT:  .LBB61_130: # %else486
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_131
-; CHECK-RV32-NEXT:    j .LBB61_660
-; CHECK-RV32-NEXT:  .LBB61_131: # %else490
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_132
-; CHECK-RV32-NEXT:    j .LBB61_661
-; CHECK-RV32-NEXT:  .LBB61_132: # %else494
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_134
-; CHECK-RV32-NEXT:  .LBB61_133: # %cond.load497
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 126
-; CHECK-RV32-NEXT:    li a4, 125
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:  .LBB61_134: # %else498
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_136
-; CHECK-RV32-NEXT:  # %bb.135: # %cond.load501
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v18, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 127
-; CHECK-RV32-NEXT:    li a4, 126
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_136: # %else502
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_137
-; CHECK-RV32-NEXT:    j .LBB61_662
-; CHECK-RV32-NEXT:  .LBB61_137: # %else506
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_138
-; CHECK-RV32-NEXT:    j .LBB61_663
-; CHECK-RV32-NEXT:  .LBB61_138: # %else510
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_139
-; CHECK-RV32-NEXT:    j .LBB61_664
-; CHECK-RV32-NEXT:  .LBB61_139: # %else514
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_140
-; CHECK-RV32-NEXT:    j .LBB61_665
-; CHECK-RV32-NEXT:  .LBB61_140: # %else518
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_141
-; CHECK-RV32-NEXT:    j .LBB61_666
-; CHECK-RV32-NEXT:  .LBB61_141: # %else522
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_142
-; CHECK-RV32-NEXT:    j .LBB61_667
-; CHECK-RV32-NEXT:  .LBB61_142: # %else526
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_143
-; CHECK-RV32-NEXT:    j .LBB61_668
-; CHECK-RV32-NEXT:  .LBB61_143: # %else530
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_144
-; CHECK-RV32-NEXT:    j .LBB61_669
-; CHECK-RV32-NEXT:  .LBB61_144: # %else534
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_145
-; CHECK-RV32-NEXT:    j .LBB61_670
-; CHECK-RV32-NEXT:  .LBB61_145: # %else538
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_146
-; CHECK-RV32-NEXT:    j .LBB61_671
-; CHECK-RV32-NEXT:  .LBB61_146: # %else542
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_147
-; CHECK-RV32-NEXT:    j .LBB61_672
-; CHECK-RV32-NEXT:  .LBB61_147: # %else546
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_148
-; CHECK-RV32-NEXT:    j .LBB61_673
-; CHECK-RV32-NEXT:  .LBB61_148: # %else550
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_149
-; CHECK-RV32-NEXT:    j .LBB61_674
-; CHECK-RV32-NEXT:  .LBB61_149: # %else554
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_150
-; CHECK-RV32-NEXT:    j .LBB61_675
-; CHECK-RV32-NEXT:  .LBB61_150: # %else558
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_151
-; CHECK-RV32-NEXT:    j .LBB61_676
-; CHECK-RV32-NEXT:  .LBB61_151: # %else562
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_152
-; CHECK-RV32-NEXT:    j .LBB61_677
-; CHECK-RV32-NEXT:  .LBB61_152: # %else566
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_153
-; CHECK-RV32-NEXT:    j .LBB61_678
-; CHECK-RV32-NEXT:  .LBB61_153: # %else570
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_154
-; CHECK-RV32-NEXT:    j .LBB61_679
-; CHECK-RV32-NEXT:  .LBB61_154: # %else574
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_155
-; CHECK-RV32-NEXT:    j .LBB61_680
-; CHECK-RV32-NEXT:  .LBB61_155: # %else578
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_156
-; CHECK-RV32-NEXT:    j .LBB61_681
-; CHECK-RV32-NEXT:  .LBB61_156: # %else582
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_157
-; CHECK-RV32-NEXT:    j .LBB61_682
-; CHECK-RV32-NEXT:  .LBB61_157: # %else586
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_158
-; CHECK-RV32-NEXT:    j .LBB61_683
-; CHECK-RV32-NEXT:  .LBB61_158: # %else590
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_159
-; CHECK-RV32-NEXT:    j .LBB61_684
-; CHECK-RV32-NEXT:  .LBB61_159: # %else594
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_160
-; CHECK-RV32-NEXT:    j .LBB61_685
-; CHECK-RV32-NEXT:  .LBB61_160: # %else598
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_161
-; CHECK-RV32-NEXT:    j .LBB61_686
-; CHECK-RV32-NEXT:  .LBB61_161: # %else602
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_162
-; CHECK-RV32-NEXT:    j .LBB61_687
-; CHECK-RV32-NEXT:  .LBB61_162: # %else606
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_163
-; CHECK-RV32-NEXT:    j .LBB61_688
-; CHECK-RV32-NEXT:  .LBB61_163: # %else610
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_164
-; CHECK-RV32-NEXT:    j .LBB61_689
-; CHECK-RV32-NEXT:  .LBB61_164: # %else614
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_165
-; CHECK-RV32-NEXT:    j .LBB61_690
-; CHECK-RV32-NEXT:  .LBB61_165: # %else618
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_166
-; CHECK-RV32-NEXT:    j .LBB61_691
-; CHECK-RV32-NEXT:  .LBB61_166: # %else622
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_168
-; CHECK-RV32-NEXT:  .LBB61_167: # %cond.load625
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 158
-; CHECK-RV32-NEXT:    li a4, 157
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_168: # %else626
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_170
-; CHECK-RV32-NEXT:  # %bb.169: # %cond.load629
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 159
-; CHECK-RV32-NEXT:    li a4, 158
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_170: # %else630
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_171
-; CHECK-RV32-NEXT:    j .LBB61_692
-; CHECK-RV32-NEXT:  .LBB61_171: # %else634
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_172
-; CHECK-RV32-NEXT:    j .LBB61_693
-; CHECK-RV32-NEXT:  .LBB61_172: # %else638
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_173
-; CHECK-RV32-NEXT:    j .LBB61_694
-; CHECK-RV32-NEXT:  .LBB61_173: # %else642
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_174
-; CHECK-RV32-NEXT:    j .LBB61_695
-; CHECK-RV32-NEXT:  .LBB61_174: # %else646
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_175
-; CHECK-RV32-NEXT:    j .LBB61_696
-; CHECK-RV32-NEXT:  .LBB61_175: # %else650
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_176
-; CHECK-RV32-NEXT:    j .LBB61_697
-; CHECK-RV32-NEXT:  .LBB61_176: # %else654
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_177
-; CHECK-RV32-NEXT:    j .LBB61_698
-; CHECK-RV32-NEXT:  .LBB61_177: # %else658
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_178
-; CHECK-RV32-NEXT:    j .LBB61_699
-; CHECK-RV32-NEXT:  .LBB61_178: # %else662
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_179
-; CHECK-RV32-NEXT:    j .LBB61_700
-; CHECK-RV32-NEXT:  .LBB61_179: # %else666
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_180
-; CHECK-RV32-NEXT:    j .LBB61_701
-; CHECK-RV32-NEXT:  .LBB61_180: # %else670
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_181
-; CHECK-RV32-NEXT:    j .LBB61_702
-; CHECK-RV32-NEXT:  .LBB61_181: # %else674
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_182
-; CHECK-RV32-NEXT:    j .LBB61_703
-; CHECK-RV32-NEXT:  .LBB61_182: # %else678
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_183
-; CHECK-RV32-NEXT:    j .LBB61_704
-; CHECK-RV32-NEXT:  .LBB61_183: # %else682
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_184
-; CHECK-RV32-NEXT:    j .LBB61_705
-; CHECK-RV32-NEXT:  .LBB61_184: # %else686
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_185
-; CHECK-RV32-NEXT:    j .LBB61_706
-; CHECK-RV32-NEXT:  .LBB61_185: # %else690
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_186
-; CHECK-RV32-NEXT:    j .LBB61_707
-; CHECK-RV32-NEXT:  .LBB61_186: # %else694
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_187
-; CHECK-RV32-NEXT:    j .LBB61_708
-; CHECK-RV32-NEXT:  .LBB61_187: # %else698
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_188
-; CHECK-RV32-NEXT:    j .LBB61_709
-; CHECK-RV32-NEXT:  .LBB61_188: # %else702
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_189
-; CHECK-RV32-NEXT:    j .LBB61_710
-; CHECK-RV32-NEXT:  .LBB61_189: # %else706
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_190
-; CHECK-RV32-NEXT:    j .LBB61_711
-; CHECK-RV32-NEXT:  .LBB61_190: # %else710
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_191
-; CHECK-RV32-NEXT:    j .LBB61_712
-; CHECK-RV32-NEXT:  .LBB61_191: # %else714
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_192
-; CHECK-RV32-NEXT:    j .LBB61_713
-; CHECK-RV32-NEXT:  .LBB61_192: # %else718
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_193
-; CHECK-RV32-NEXT:    j .LBB61_714
-; CHECK-RV32-NEXT:  .LBB61_193: # %else722
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_194
-; CHECK-RV32-NEXT:    j .LBB61_715
-; CHECK-RV32-NEXT:  .LBB61_194: # %else726
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_195
-; CHECK-RV32-NEXT:    j .LBB61_716
-; CHECK-RV32-NEXT:  .LBB61_195: # %else730
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_196
-; CHECK-RV32-NEXT:    j .LBB61_717
-; CHECK-RV32-NEXT:  .LBB61_196: # %else734
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_197
-; CHECK-RV32-NEXT:    j .LBB61_718
-; CHECK-RV32-NEXT:  .LBB61_197: # %else738
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_198
-; CHECK-RV32-NEXT:    j .LBB61_719
-; CHECK-RV32-NEXT:  .LBB61_198: # %else742
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_199
-; CHECK-RV32-NEXT:    j .LBB61_720
-; CHECK-RV32-NEXT:  .LBB61_199: # %else746
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_200
-; CHECK-RV32-NEXT:    j .LBB61_721
-; CHECK-RV32-NEXT:  .LBB61_200: # %else750
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_202
-; CHECK-RV32-NEXT:  .LBB61_201: # %cond.load753
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 190
-; CHECK-RV32-NEXT:    li a4, 189
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_202: # %else754
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_204
-; CHECK-RV32-NEXT:  # %bb.203: # %cond.load757
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 191
-; CHECK-RV32-NEXT:    li a4, 190
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_204: # %else758
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_205
-; CHECK-RV32-NEXT:    j .LBB61_722
-; CHECK-RV32-NEXT:  .LBB61_205: # %else762
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_206
-; CHECK-RV32-NEXT:    j .LBB61_723
-; CHECK-RV32-NEXT:  .LBB61_206: # %else766
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_207
-; CHECK-RV32-NEXT:    j .LBB61_724
-; CHECK-RV32-NEXT:  .LBB61_207: # %else770
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_208
-; CHECK-RV32-NEXT:    j .LBB61_725
-; CHECK-RV32-NEXT:  .LBB61_208: # %else774
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_209
-; CHECK-RV32-NEXT:    j .LBB61_726
-; CHECK-RV32-NEXT:  .LBB61_209: # %else778
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_210
-; CHECK-RV32-NEXT:    j .LBB61_727
-; CHECK-RV32-NEXT:  .LBB61_210: # %else782
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_211
-; CHECK-RV32-NEXT:    j .LBB61_728
-; CHECK-RV32-NEXT:  .LBB61_211: # %else786
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_212
-; CHECK-RV32-NEXT:    j .LBB61_729
-; CHECK-RV32-NEXT:  .LBB61_212: # %else790
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_213
-; CHECK-RV32-NEXT:    j .LBB61_730
-; CHECK-RV32-NEXT:  .LBB61_213: # %else794
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_214
-; CHECK-RV32-NEXT:    j .LBB61_731
-; CHECK-RV32-NEXT:  .LBB61_214: # %else798
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_215
-; CHECK-RV32-NEXT:    j .LBB61_732
-; CHECK-RV32-NEXT:  .LBB61_215: # %else802
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_216
-; CHECK-RV32-NEXT:    j .LBB61_733
-; CHECK-RV32-NEXT:  .LBB61_216: # %else806
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_217
-; CHECK-RV32-NEXT:    j .LBB61_734
-; CHECK-RV32-NEXT:  .LBB61_217: # %else810
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_218
-; CHECK-RV32-NEXT:    j .LBB61_735
-; CHECK-RV32-NEXT:  .LBB61_218: # %else814
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_219
-; CHECK-RV32-NEXT:    j .LBB61_736
-; CHECK-RV32-NEXT:  .LBB61_219: # %else818
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_220
-; CHECK-RV32-NEXT:    j .LBB61_737
-; CHECK-RV32-NEXT:  .LBB61_220: # %else822
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_221
-; CHECK-RV32-NEXT:    j .LBB61_738
-; CHECK-RV32-NEXT:  .LBB61_221: # %else826
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_222
-; CHECK-RV32-NEXT:    j .LBB61_739
-; CHECK-RV32-NEXT:  .LBB61_222: # %else830
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_223
-; CHECK-RV32-NEXT:    j .LBB61_740
-; CHECK-RV32-NEXT:  .LBB61_223: # %else834
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_224
-; CHECK-RV32-NEXT:    j .LBB61_741
-; CHECK-RV32-NEXT:  .LBB61_224: # %else838
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_225
-; CHECK-RV32-NEXT:    j .LBB61_742
-; CHECK-RV32-NEXT:  .LBB61_225: # %else842
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_226
-; CHECK-RV32-NEXT:    j .LBB61_743
-; CHECK-RV32-NEXT:  .LBB61_226: # %else846
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_227
-; CHECK-RV32-NEXT:    j .LBB61_744
-; CHECK-RV32-NEXT:  .LBB61_227: # %else850
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_228
-; CHECK-RV32-NEXT:    j .LBB61_745
-; CHECK-RV32-NEXT:  .LBB61_228: # %else854
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_229
-; CHECK-RV32-NEXT:    j .LBB61_746
-; CHECK-RV32-NEXT:  .LBB61_229: # %else858
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_230
-; CHECK-RV32-NEXT:    j .LBB61_747
-; CHECK-RV32-NEXT:  .LBB61_230: # %else862
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_231
-; CHECK-RV32-NEXT:    j .LBB61_748
-; CHECK-RV32-NEXT:  .LBB61_231: # %else866
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_232
-; CHECK-RV32-NEXT:    j .LBB61_749
-; CHECK-RV32-NEXT:  .LBB61_232: # %else870
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_233
-; CHECK-RV32-NEXT:    j .LBB61_750
-; CHECK-RV32-NEXT:  .LBB61_233: # %else874
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_234
-; CHECK-RV32-NEXT:    j .LBB61_751
-; CHECK-RV32-NEXT:  .LBB61_234: # %else878
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_236
-; CHECK-RV32-NEXT:  .LBB61_235: # %cond.load881
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 222
-; CHECK-RV32-NEXT:    li a4, 221
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_236: # %else882
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_238
-; CHECK-RV32-NEXT:  # %bb.237: # %cond.load885
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 223
-; CHECK-RV32-NEXT:    li a4, 222
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_238: # %else886
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_239
-; CHECK-RV32-NEXT:    j .LBB61_752
-; CHECK-RV32-NEXT:  .LBB61_239: # %else890
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_240
-; CHECK-RV32-NEXT:    j .LBB61_753
-; CHECK-RV32-NEXT:  .LBB61_240: # %else894
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_241
-; CHECK-RV32-NEXT:    j .LBB61_754
-; CHECK-RV32-NEXT:  .LBB61_241: # %else898
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_242
-; CHECK-RV32-NEXT:    j .LBB61_755
-; CHECK-RV32-NEXT:  .LBB61_242: # %else902
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_243
-; CHECK-RV32-NEXT:    j .LBB61_756
-; CHECK-RV32-NEXT:  .LBB61_243: # %else906
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_244
-; CHECK-RV32-NEXT:    j .LBB61_757
-; CHECK-RV32-NEXT:  .LBB61_244: # %else910
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_245
-; CHECK-RV32-NEXT:    j .LBB61_758
-; CHECK-RV32-NEXT:  .LBB61_245: # %else914
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_246
-; CHECK-RV32-NEXT:    j .LBB61_759
-; CHECK-RV32-NEXT:  .LBB61_246: # %else918
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_247
-; CHECK-RV32-NEXT:    j .LBB61_760
-; CHECK-RV32-NEXT:  .LBB61_247: # %else922
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_248
-; CHECK-RV32-NEXT:    j .LBB61_761
-; CHECK-RV32-NEXT:  .LBB61_248: # %else926
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_249
-; CHECK-RV32-NEXT:    j .LBB61_762
-; CHECK-RV32-NEXT:  .LBB61_249: # %else930
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_250
-; CHECK-RV32-NEXT:    j .LBB61_763
-; CHECK-RV32-NEXT:  .LBB61_250: # %else934
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_251
-; CHECK-RV32-NEXT:    j .LBB61_764
-; CHECK-RV32-NEXT:  .LBB61_251: # %else938
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_252
-; CHECK-RV32-NEXT:    j .LBB61_765
-; CHECK-RV32-NEXT:  .LBB61_252: # %else942
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_253
-; CHECK-RV32-NEXT:    j .LBB61_766
-; CHECK-RV32-NEXT:  .LBB61_253: # %else946
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_254
-; CHECK-RV32-NEXT:    j .LBB61_767
-; CHECK-RV32-NEXT:  .LBB61_254: # %else950
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_255
-; CHECK-RV32-NEXT:    j .LBB61_768
-; CHECK-RV32-NEXT:  .LBB61_255: # %else954
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_256
-; CHECK-RV32-NEXT:    j .LBB61_769
-; CHECK-RV32-NEXT:  .LBB61_256: # %else958
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_257
-; CHECK-RV32-NEXT:    j .LBB61_770
-; CHECK-RV32-NEXT:  .LBB61_257: # %else962
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_258
-; CHECK-RV32-NEXT:    j .LBB61_771
-; CHECK-RV32-NEXT:  .LBB61_258: # %else966
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_259
-; CHECK-RV32-NEXT:    j .LBB61_772
-; CHECK-RV32-NEXT:  .LBB61_259: # %else970
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_260
-; CHECK-RV32-NEXT:    j .LBB61_773
-; CHECK-RV32-NEXT:  .LBB61_260: # %else974
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_261
-; CHECK-RV32-NEXT:    j .LBB61_774
-; CHECK-RV32-NEXT:  .LBB61_261: # %else978
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_262
-; CHECK-RV32-NEXT:    j .LBB61_775
-; CHECK-RV32-NEXT:  .LBB61_262: # %else982
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_263
-; CHECK-RV32-NEXT:    j .LBB61_776
-; CHECK-RV32-NEXT:  .LBB61_263: # %else986
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_264
-; CHECK-RV32-NEXT:    j .LBB61_777
-; CHECK-RV32-NEXT:  .LBB61_264: # %else990
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_265
-; CHECK-RV32-NEXT:    j .LBB61_778
-; CHECK-RV32-NEXT:  .LBB61_265: # %else994
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_266
-; CHECK-RV32-NEXT:    j .LBB61_779
-; CHECK-RV32-NEXT:  .LBB61_266: # %else998
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_267
-; CHECK-RV32-NEXT:    j .LBB61_780
-; CHECK-RV32-NEXT:  .LBB61_267: # %else1002
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_268
-; CHECK-RV32-NEXT:    j .LBB61_781
-; CHECK-RV32-NEXT:  .LBB61_268: # %else1006
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_270
-; CHECK-RV32-NEXT:  .LBB61_269: # %cond.load1009
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 254
-; CHECK-RV32-NEXT:    li a4, 253
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_270: # %else1010
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_272
-; CHECK-RV32-NEXT:  # %bb.271: # %cond.load1013
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 255
-; CHECK-RV32-NEXT:    li a4, 254
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:  .LBB61_272: # %else1014
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_273
-; CHECK-RV32-NEXT:    j .LBB61_782
-; CHECK-RV32-NEXT:  .LBB61_273: # %else1018
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_274
-; CHECK-RV32-NEXT:    j .LBB61_783
-; CHECK-RV32-NEXT:  .LBB61_274: # %else1022
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_275
-; CHECK-RV32-NEXT:    j .LBB61_784
-; CHECK-RV32-NEXT:  .LBB61_275: # %else1026
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_276
-; CHECK-RV32-NEXT:    j .LBB61_785
-; CHECK-RV32-NEXT:  .LBB61_276: # %else1030
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_277
-; CHECK-RV32-NEXT:    j .LBB61_786
-; CHECK-RV32-NEXT:  .LBB61_277: # %else1034
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_278
-; CHECK-RV32-NEXT:    j .LBB61_787
-; CHECK-RV32-NEXT:  .LBB61_278: # %else1038
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_279
-; CHECK-RV32-NEXT:    j .LBB61_788
-; CHECK-RV32-NEXT:  .LBB61_279: # %else1042
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_280
-; CHECK-RV32-NEXT:    j .LBB61_789
-; CHECK-RV32-NEXT:  .LBB61_280: # %else1046
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_281
-; CHECK-RV32-NEXT:    j .LBB61_790
-; CHECK-RV32-NEXT:  .LBB61_281: # %else1050
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_282
-; CHECK-RV32-NEXT:    j .LBB61_791
-; CHECK-RV32-NEXT:  .LBB61_282: # %else1054
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_283
-; CHECK-RV32-NEXT:    j .LBB61_792
-; CHECK-RV32-NEXT:  .LBB61_283: # %else1058
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_284
-; CHECK-RV32-NEXT:    j .LBB61_793
-; CHECK-RV32-NEXT:  .LBB61_284: # %else1062
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_285
-; CHECK-RV32-NEXT:    j .LBB61_794
-; CHECK-RV32-NEXT:  .LBB61_285: # %else1066
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_286
-; CHECK-RV32-NEXT:    j .LBB61_795
-; CHECK-RV32-NEXT:  .LBB61_286: # %else1070
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_287
-; CHECK-RV32-NEXT:    j .LBB61_796
-; CHECK-RV32-NEXT:  .LBB61_287: # %else1074
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_288
-; CHECK-RV32-NEXT:    j .LBB61_797
-; CHECK-RV32-NEXT:  .LBB61_288: # %else1078
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_289
-; CHECK-RV32-NEXT:    j .LBB61_798
-; CHECK-RV32-NEXT:  .LBB61_289: # %else1082
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_290
-; CHECK-RV32-NEXT:    j .LBB61_799
-; CHECK-RV32-NEXT:  .LBB61_290: # %else1086
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_291
-; CHECK-RV32-NEXT:    j .LBB61_800
-; CHECK-RV32-NEXT:  .LBB61_291: # %else1090
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_292
-; CHECK-RV32-NEXT:    j .LBB61_801
-; CHECK-RV32-NEXT:  .LBB61_292: # %else1094
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_293
-; CHECK-RV32-NEXT:    j .LBB61_802
-; CHECK-RV32-NEXT:  .LBB61_293: # %else1098
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_294
-; CHECK-RV32-NEXT:    j .LBB61_803
-; CHECK-RV32-NEXT:  .LBB61_294: # %else1102
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_295
-; CHECK-RV32-NEXT:    j .LBB61_804
-; CHECK-RV32-NEXT:  .LBB61_295: # %else1106
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_296
-; CHECK-RV32-NEXT:    j .LBB61_805
-; CHECK-RV32-NEXT:  .LBB61_296: # %else1110
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_297
-; CHECK-RV32-NEXT:    j .LBB61_806
-; CHECK-RV32-NEXT:  .LBB61_297: # %else1114
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_298
-; CHECK-RV32-NEXT:    j .LBB61_807
-; CHECK-RV32-NEXT:  .LBB61_298: # %else1118
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_299
-; CHECK-RV32-NEXT:    j .LBB61_808
-; CHECK-RV32-NEXT:  .LBB61_299: # %else1122
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_300
-; CHECK-RV32-NEXT:    j .LBB61_809
-; CHECK-RV32-NEXT:  .LBB61_300: # %else1126
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_301
-; CHECK-RV32-NEXT:    j .LBB61_810
-; CHECK-RV32-NEXT:  .LBB61_301: # %else1130
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_302
-; CHECK-RV32-NEXT:    j .LBB61_811
-; CHECK-RV32-NEXT:  .LBB61_302: # %else1134
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_304
-; CHECK-RV32-NEXT:  .LBB61_303: # %cond.load1137
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 286
-; CHECK-RV32-NEXT:    li a4, 285
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_304: # %else1138
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_306
-; CHECK-RV32-NEXT:  # %bb.305: # %cond.load1141
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 287
-; CHECK-RV32-NEXT:    li a4, 286
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_306: # %else1142
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_307
-; CHECK-RV32-NEXT:    j .LBB61_812
-; CHECK-RV32-NEXT:  .LBB61_307: # %else1146
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_308
-; CHECK-RV32-NEXT:    j .LBB61_813
-; CHECK-RV32-NEXT:  .LBB61_308: # %else1150
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_309
-; CHECK-RV32-NEXT:    j .LBB61_814
-; CHECK-RV32-NEXT:  .LBB61_309: # %else1154
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_310
-; CHECK-RV32-NEXT:    j .LBB61_815
-; CHECK-RV32-NEXT:  .LBB61_310: # %else1158
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_311
-; CHECK-RV32-NEXT:    j .LBB61_816
-; CHECK-RV32-NEXT:  .LBB61_311: # %else1162
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_312
-; CHECK-RV32-NEXT:    j .LBB61_817
-; CHECK-RV32-NEXT:  .LBB61_312: # %else1166
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_313
-; CHECK-RV32-NEXT:    j .LBB61_818
-; CHECK-RV32-NEXT:  .LBB61_313: # %else1170
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_314
-; CHECK-RV32-NEXT:    j .LBB61_819
-; CHECK-RV32-NEXT:  .LBB61_314: # %else1174
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_315
-; CHECK-RV32-NEXT:    j .LBB61_820
-; CHECK-RV32-NEXT:  .LBB61_315: # %else1178
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_316
-; CHECK-RV32-NEXT:    j .LBB61_821
-; CHECK-RV32-NEXT:  .LBB61_316: # %else1182
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_317
-; CHECK-RV32-NEXT:    j .LBB61_822
-; CHECK-RV32-NEXT:  .LBB61_317: # %else1186
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_318
-; CHECK-RV32-NEXT:    j .LBB61_823
-; CHECK-RV32-NEXT:  .LBB61_318: # %else1190
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_319
-; CHECK-RV32-NEXT:    j .LBB61_824
-; CHECK-RV32-NEXT:  .LBB61_319: # %else1194
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_320
-; CHECK-RV32-NEXT:    j .LBB61_825
-; CHECK-RV32-NEXT:  .LBB61_320: # %else1198
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_321
-; CHECK-RV32-NEXT:    j .LBB61_826
-; CHECK-RV32-NEXT:  .LBB61_321: # %else1202
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_322
-; CHECK-RV32-NEXT:    j .LBB61_827
-; CHECK-RV32-NEXT:  .LBB61_322: # %else1206
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_323
-; CHECK-RV32-NEXT:    j .LBB61_828
-; CHECK-RV32-NEXT:  .LBB61_323: # %else1210
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_324
-; CHECK-RV32-NEXT:    j .LBB61_829
-; CHECK-RV32-NEXT:  .LBB61_324: # %else1214
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_325
-; CHECK-RV32-NEXT:    j .LBB61_830
-; CHECK-RV32-NEXT:  .LBB61_325: # %else1218
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_326
-; CHECK-RV32-NEXT:    j .LBB61_831
-; CHECK-RV32-NEXT:  .LBB61_326: # %else1222
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_327
-; CHECK-RV32-NEXT:    j .LBB61_832
-; CHECK-RV32-NEXT:  .LBB61_327: # %else1226
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_328
-; CHECK-RV32-NEXT:    j .LBB61_833
-; CHECK-RV32-NEXT:  .LBB61_328: # %else1230
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_329
-; CHECK-RV32-NEXT:    j .LBB61_834
-; CHECK-RV32-NEXT:  .LBB61_329: # %else1234
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_330
-; CHECK-RV32-NEXT:    j .LBB61_835
-; CHECK-RV32-NEXT:  .LBB61_330: # %else1238
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_331
-; CHECK-RV32-NEXT:    j .LBB61_836
-; CHECK-RV32-NEXT:  .LBB61_331: # %else1242
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_332
-; CHECK-RV32-NEXT:    j .LBB61_837
-; CHECK-RV32-NEXT:  .LBB61_332: # %else1246
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_333
-; CHECK-RV32-NEXT:    j .LBB61_838
-; CHECK-RV32-NEXT:  .LBB61_333: # %else1250
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_334
-; CHECK-RV32-NEXT:    j .LBB61_839
-; CHECK-RV32-NEXT:  .LBB61_334: # %else1254
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_335
-; CHECK-RV32-NEXT:    j .LBB61_840
-; CHECK-RV32-NEXT:  .LBB61_335: # %else1258
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_336
-; CHECK-RV32-NEXT:    j .LBB61_841
-; CHECK-RV32-NEXT:  .LBB61_336: # %else1262
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_338
-; CHECK-RV32-NEXT:  .LBB61_337: # %cond.load1265
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 318
-; CHECK-RV32-NEXT:    li a4, 317
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_338: # %else1266
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_340
-; CHECK-RV32-NEXT:  # %bb.339: # %cond.load1269
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    li a3, 319
-; CHECK-RV32-NEXT:    li a4, 318
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_340: # %else1270
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_341
-; CHECK-RV32-NEXT:    j .LBB61_842
-; CHECK-RV32-NEXT:  .LBB61_341: # %else1274
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_342
-; CHECK-RV32-NEXT:    j .LBB61_843
-; CHECK-RV32-NEXT:  .LBB61_342: # %else1278
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_343
-; CHECK-RV32-NEXT:    j .LBB61_844
-; CHECK-RV32-NEXT:  .LBB61_343: # %else1282
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_344
-; CHECK-RV32-NEXT:    j .LBB61_845
-; CHECK-RV32-NEXT:  .LBB61_344: # %else1286
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_345
-; CHECK-RV32-NEXT:    j .LBB61_846
-; CHECK-RV32-NEXT:  .LBB61_345: # %else1290
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_346
-; CHECK-RV32-NEXT:    j .LBB61_847
-; CHECK-RV32-NEXT:  .LBB61_346: # %else1294
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_347
-; CHECK-RV32-NEXT:    j .LBB61_848
-; CHECK-RV32-NEXT:  .LBB61_347: # %else1298
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_348
-; CHECK-RV32-NEXT:    j .LBB61_849
-; CHECK-RV32-NEXT:  .LBB61_348: # %else1302
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_349
-; CHECK-RV32-NEXT:    j .LBB61_850
-; CHECK-RV32-NEXT:  .LBB61_349: # %else1306
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_350
-; CHECK-RV32-NEXT:    j .LBB61_851
-; CHECK-RV32-NEXT:  .LBB61_350: # %else1310
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_351
-; CHECK-RV32-NEXT:    j .LBB61_852
-; CHECK-RV32-NEXT:  .LBB61_351: # %else1314
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_352
-; CHECK-RV32-NEXT:    j .LBB61_853
-; CHECK-RV32-NEXT:  .LBB61_352: # %else1318
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_353
-; CHECK-RV32-NEXT:    j .LBB61_854
-; CHECK-RV32-NEXT:  .LBB61_353: # %else1322
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_354
-; CHECK-RV32-NEXT:    j .LBB61_855
-; CHECK-RV32-NEXT:  .LBB61_354: # %else1326
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_355
-; CHECK-RV32-NEXT:    j .LBB61_856
-; CHECK-RV32-NEXT:  .LBB61_355: # %else1330
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_356
-; CHECK-RV32-NEXT:    j .LBB61_857
-; CHECK-RV32-NEXT:  .LBB61_356: # %else1334
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_357
-; CHECK-RV32-NEXT:    j .LBB61_858
-; CHECK-RV32-NEXT:  .LBB61_357: # %else1338
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_358
-; CHECK-RV32-NEXT:    j .LBB61_859
-; CHECK-RV32-NEXT:  .LBB61_358: # %else1342
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_359
-; CHECK-RV32-NEXT:    j .LBB61_860
-; CHECK-RV32-NEXT:  .LBB61_359: # %else1346
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_360
-; CHECK-RV32-NEXT:    j .LBB61_861
-; CHECK-RV32-NEXT:  .LBB61_360: # %else1350
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_361
-; CHECK-RV32-NEXT:    j .LBB61_862
-; CHECK-RV32-NEXT:  .LBB61_361: # %else1354
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_362
-; CHECK-RV32-NEXT:    j .LBB61_863
-; CHECK-RV32-NEXT:  .LBB61_362: # %else1358
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_363
-; CHECK-RV32-NEXT:    j .LBB61_864
-; CHECK-RV32-NEXT:  .LBB61_363: # %else1362
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_364
-; CHECK-RV32-NEXT:    j .LBB61_865
-; CHECK-RV32-NEXT:  .LBB61_364: # %else1366
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_365
-; CHECK-RV32-NEXT:    j .LBB61_866
-; CHECK-RV32-NEXT:  .LBB61_365: # %else1370
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_366
-; CHECK-RV32-NEXT:    j .LBB61_867
-; CHECK-RV32-NEXT:  .LBB61_366: # %else1374
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_367
-; CHECK-RV32-NEXT:    j .LBB61_868
-; CHECK-RV32-NEXT:  .LBB61_367: # %else1378
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_368
-; CHECK-RV32-NEXT:    j .LBB61_869
-; CHECK-RV32-NEXT:  .LBB61_368: # %else1382
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_369
-; CHECK-RV32-NEXT:    j .LBB61_870
-; CHECK-RV32-NEXT:  .LBB61_369: # %else1386
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_370
-; CHECK-RV32-NEXT:    j .LBB61_871
-; CHECK-RV32-NEXT:  .LBB61_370: # %else1390
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_372
-; CHECK-RV32-NEXT:  .LBB61_371: # %cond.load1393
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 350
-; CHECK-RV32-NEXT:    li a4, 349
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_372: # %else1394
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_374
-; CHECK-RV32-NEXT:  # %bb.373: # %cond.load1397
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 351
-; CHECK-RV32-NEXT:    li a4, 350
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_374: # %else1398
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_375
-; CHECK-RV32-NEXT:    j .LBB61_872
-; CHECK-RV32-NEXT:  .LBB61_375: # %else1402
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_376
-; CHECK-RV32-NEXT:    j .LBB61_873
-; CHECK-RV32-NEXT:  .LBB61_376: # %else1406
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_377
-; CHECK-RV32-NEXT:    j .LBB61_874
-; CHECK-RV32-NEXT:  .LBB61_377: # %else1410
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_378
-; CHECK-RV32-NEXT:    j .LBB61_875
-; CHECK-RV32-NEXT:  .LBB61_378: # %else1414
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_379
-; CHECK-RV32-NEXT:    j .LBB61_876
-; CHECK-RV32-NEXT:  .LBB61_379: # %else1418
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_380
-; CHECK-RV32-NEXT:    j .LBB61_877
-; CHECK-RV32-NEXT:  .LBB61_380: # %else1422
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_381
-; CHECK-RV32-NEXT:    j .LBB61_878
-; CHECK-RV32-NEXT:  .LBB61_381: # %else1426
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_382
-; CHECK-RV32-NEXT:    j .LBB61_879
-; CHECK-RV32-NEXT:  .LBB61_382: # %else1430
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_383
-; CHECK-RV32-NEXT:    j .LBB61_880
-; CHECK-RV32-NEXT:  .LBB61_383: # %else1434
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_384
-; CHECK-RV32-NEXT:    j .LBB61_881
-; CHECK-RV32-NEXT:  .LBB61_384: # %else1438
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_385
-; CHECK-RV32-NEXT:    j .LBB61_882
-; CHECK-RV32-NEXT:  .LBB61_385: # %else1442
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_386
-; CHECK-RV32-NEXT:    j .LBB61_883
-; CHECK-RV32-NEXT:  .LBB61_386: # %else1446
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_387
-; CHECK-RV32-NEXT:    j .LBB61_884
-; CHECK-RV32-NEXT:  .LBB61_387: # %else1450
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_388
-; CHECK-RV32-NEXT:    j .LBB61_885
-; CHECK-RV32-NEXT:  .LBB61_388: # %else1454
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_389
-; CHECK-RV32-NEXT:    j .LBB61_886
-; CHECK-RV32-NEXT:  .LBB61_389: # %else1458
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_390
-; CHECK-RV32-NEXT:    j .LBB61_887
-; CHECK-RV32-NEXT:  .LBB61_390: # %else1462
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_391
-; CHECK-RV32-NEXT:    j .LBB61_888
-; CHECK-RV32-NEXT:  .LBB61_391: # %else1466
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_392
-; CHECK-RV32-NEXT:    j .LBB61_889
-; CHECK-RV32-NEXT:  .LBB61_392: # %else1470
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_393
-; CHECK-RV32-NEXT:    j .LBB61_890
-; CHECK-RV32-NEXT:  .LBB61_393: # %else1474
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_394
-; CHECK-RV32-NEXT:    j .LBB61_891
-; CHECK-RV32-NEXT:  .LBB61_394: # %else1478
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_395
-; CHECK-RV32-NEXT:    j .LBB61_892
-; CHECK-RV32-NEXT:  .LBB61_395: # %else1482
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_396
-; CHECK-RV32-NEXT:    j .LBB61_893
-; CHECK-RV32-NEXT:  .LBB61_396: # %else1486
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_397
-; CHECK-RV32-NEXT:    j .LBB61_894
-; CHECK-RV32-NEXT:  .LBB61_397: # %else1490
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_398
-; CHECK-RV32-NEXT:    j .LBB61_895
-; CHECK-RV32-NEXT:  .LBB61_398: # %else1494
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_399
-; CHECK-RV32-NEXT:    j .LBB61_896
-; CHECK-RV32-NEXT:  .LBB61_399: # %else1498
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_400
-; CHECK-RV32-NEXT:    j .LBB61_897
-; CHECK-RV32-NEXT:  .LBB61_400: # %else1502
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_401
-; CHECK-RV32-NEXT:    j .LBB61_898
-; CHECK-RV32-NEXT:  .LBB61_401: # %else1506
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_402
-; CHECK-RV32-NEXT:    j .LBB61_899
-; CHECK-RV32-NEXT:  .LBB61_402: # %else1510
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_403
-; CHECK-RV32-NEXT:    j .LBB61_900
-; CHECK-RV32-NEXT:  .LBB61_403: # %else1514
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_404
-; CHECK-RV32-NEXT:    j .LBB61_901
-; CHECK-RV32-NEXT:  .LBB61_404: # %else1518
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_406
-; CHECK-RV32-NEXT:  .LBB61_405: # %cond.load1521
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 382
-; CHECK-RV32-NEXT:    li a4, 381
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_406: # %else1522
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_408
-; CHECK-RV32-NEXT:  # %bb.407: # %cond.load1525
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    li a3, 383
-; CHECK-RV32-NEXT:    li a4, 382
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_408: # %else1526
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_409
-; CHECK-RV32-NEXT:    j .LBB61_902
-; CHECK-RV32-NEXT:  .LBB61_409: # %else1530
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_410
-; CHECK-RV32-NEXT:    j .LBB61_903
-; CHECK-RV32-NEXT:  .LBB61_410: # %else1534
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_411
-; CHECK-RV32-NEXT:    j .LBB61_904
-; CHECK-RV32-NEXT:  .LBB61_411: # %else1538
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_412
-; CHECK-RV32-NEXT:    j .LBB61_905
-; CHECK-RV32-NEXT:  .LBB61_412: # %else1542
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_413
-; CHECK-RV32-NEXT:    j .LBB61_906
-; CHECK-RV32-NEXT:  .LBB61_413: # %else1546
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_414
-; CHECK-RV32-NEXT:    j .LBB61_907
-; CHECK-RV32-NEXT:  .LBB61_414: # %else1550
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_415
-; CHECK-RV32-NEXT:    j .LBB61_908
-; CHECK-RV32-NEXT:  .LBB61_415: # %else1554
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_416
-; CHECK-RV32-NEXT:    j .LBB61_909
-; CHECK-RV32-NEXT:  .LBB61_416: # %else1558
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_417
-; CHECK-RV32-NEXT:    j .LBB61_910
-; CHECK-RV32-NEXT:  .LBB61_417: # %else1562
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_418
-; CHECK-RV32-NEXT:    j .LBB61_911
-; CHECK-RV32-NEXT:  .LBB61_418: # %else1566
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_419
-; CHECK-RV32-NEXT:    j .LBB61_912
-; CHECK-RV32-NEXT:  .LBB61_419: # %else1570
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_420
-; CHECK-RV32-NEXT:    j .LBB61_913
-; CHECK-RV32-NEXT:  .LBB61_420: # %else1574
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_421
-; CHECK-RV32-NEXT:    j .LBB61_914
-; CHECK-RV32-NEXT:  .LBB61_421: # %else1578
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_422
-; CHECK-RV32-NEXT:    j .LBB61_915
-; CHECK-RV32-NEXT:  .LBB61_422: # %else1582
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_423
-; CHECK-RV32-NEXT:    j .LBB61_916
-; CHECK-RV32-NEXT:  .LBB61_423: # %else1586
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_424
-; CHECK-RV32-NEXT:    j .LBB61_917
-; CHECK-RV32-NEXT:  .LBB61_424: # %else1590
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_425
-; CHECK-RV32-NEXT:    j .LBB61_918
-; CHECK-RV32-NEXT:  .LBB61_425: # %else1594
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_426
-; CHECK-RV32-NEXT:    j .LBB61_919
-; CHECK-RV32-NEXT:  .LBB61_426: # %else1598
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_427
-; CHECK-RV32-NEXT:    j .LBB61_920
-; CHECK-RV32-NEXT:  .LBB61_427: # %else1602
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_428
-; CHECK-RV32-NEXT:    j .LBB61_921
-; CHECK-RV32-NEXT:  .LBB61_428: # %else1606
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_429
-; CHECK-RV32-NEXT:    j .LBB61_922
-; CHECK-RV32-NEXT:  .LBB61_429: # %else1610
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_430
-; CHECK-RV32-NEXT:    j .LBB61_923
-; CHECK-RV32-NEXT:  .LBB61_430: # %else1614
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_431
-; CHECK-RV32-NEXT:    j .LBB61_924
-; CHECK-RV32-NEXT:  .LBB61_431: # %else1618
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_432
-; CHECK-RV32-NEXT:    j .LBB61_925
-; CHECK-RV32-NEXT:  .LBB61_432: # %else1622
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_433
-; CHECK-RV32-NEXT:    j .LBB61_926
-; CHECK-RV32-NEXT:  .LBB61_433: # %else1626
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_434
-; CHECK-RV32-NEXT:    j .LBB61_927
-; CHECK-RV32-NEXT:  .LBB61_434: # %else1630
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_435
-; CHECK-RV32-NEXT:    j .LBB61_928
-; CHECK-RV32-NEXT:  .LBB61_435: # %else1634
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_436
-; CHECK-RV32-NEXT:    j .LBB61_929
-; CHECK-RV32-NEXT:  .LBB61_436: # %else1638
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_437
-; CHECK-RV32-NEXT:    j .LBB61_930
-; CHECK-RV32-NEXT:  .LBB61_437: # %else1642
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_438
-; CHECK-RV32-NEXT:    j .LBB61_931
-; CHECK-RV32-NEXT:  .LBB61_438: # %else1646
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_440
-; CHECK-RV32-NEXT:  .LBB61_439: # %cond.load1649
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 414
-; CHECK-RV32-NEXT:    li a4, 413
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_440: # %else1650
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_442
-; CHECK-RV32-NEXT:  # %bb.441: # %cond.load1653
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 415
-; CHECK-RV32-NEXT:    li a4, 414
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_442: # %else1654
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_443
-; CHECK-RV32-NEXT:    j .LBB61_932
-; CHECK-RV32-NEXT:  .LBB61_443: # %else1658
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_444
-; CHECK-RV32-NEXT:    j .LBB61_933
-; CHECK-RV32-NEXT:  .LBB61_444: # %else1662
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_445
-; CHECK-RV32-NEXT:    j .LBB61_934
-; CHECK-RV32-NEXT:  .LBB61_445: # %else1666
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_446
-; CHECK-RV32-NEXT:    j .LBB61_935
-; CHECK-RV32-NEXT:  .LBB61_446: # %else1670
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_447
-; CHECK-RV32-NEXT:    j .LBB61_936
-; CHECK-RV32-NEXT:  .LBB61_447: # %else1674
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_448
-; CHECK-RV32-NEXT:    j .LBB61_937
-; CHECK-RV32-NEXT:  .LBB61_448: # %else1678
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_449
-; CHECK-RV32-NEXT:    j .LBB61_938
-; CHECK-RV32-NEXT:  .LBB61_449: # %else1682
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_450
-; CHECK-RV32-NEXT:    j .LBB61_939
-; CHECK-RV32-NEXT:  .LBB61_450: # %else1686
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_451
-; CHECK-RV32-NEXT:    j .LBB61_940
-; CHECK-RV32-NEXT:  .LBB61_451: # %else1690
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_452
-; CHECK-RV32-NEXT:    j .LBB61_941
-; CHECK-RV32-NEXT:  .LBB61_452: # %else1694
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_453
-; CHECK-RV32-NEXT:    j .LBB61_942
-; CHECK-RV32-NEXT:  .LBB61_453: # %else1698
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    beqz a3, .LBB61_454
-; CHECK-RV32-NEXT:    j .LBB61_943
-; CHECK-RV32-NEXT:  .LBB61_454: # %else1702
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_455
-; CHECK-RV32-NEXT:    j .LBB61_944
-; CHECK-RV32-NEXT:  .LBB61_455: # %else1706
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_456
-; CHECK-RV32-NEXT:    j .LBB61_945
-; CHECK-RV32-NEXT:  .LBB61_456: # %else1710
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_457
-; CHECK-RV32-NEXT:    j .LBB61_946
-; CHECK-RV32-NEXT:  .LBB61_457: # %else1714
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_458
-; CHECK-RV32-NEXT:    j .LBB61_947
-; CHECK-RV32-NEXT:  .LBB61_458: # %else1718
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_459
-; CHECK-RV32-NEXT:    j .LBB61_948
-; CHECK-RV32-NEXT:  .LBB61_459: # %else1722
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_460
-; CHECK-RV32-NEXT:    j .LBB61_949
-; CHECK-RV32-NEXT:  .LBB61_460: # %else1726
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_461
-; CHECK-RV32-NEXT:    j .LBB61_950
-; CHECK-RV32-NEXT:  .LBB61_461: # %else1730
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_462
-; CHECK-RV32-NEXT:    j .LBB61_951
-; CHECK-RV32-NEXT:  .LBB61_462: # %else1734
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_463
-; CHECK-RV32-NEXT:    j .LBB61_952
-; CHECK-RV32-NEXT:  .LBB61_463: # %else1738
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_464
-; CHECK-RV32-NEXT:    j .LBB61_953
-; CHECK-RV32-NEXT:  .LBB61_464: # %else1742
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_465
-; CHECK-RV32-NEXT:    j .LBB61_954
-; CHECK-RV32-NEXT:  .LBB61_465: # %else1746
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_466
-; CHECK-RV32-NEXT:    j .LBB61_955
-; CHECK-RV32-NEXT:  .LBB61_466: # %else1750
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_467
-; CHECK-RV32-NEXT:    j .LBB61_956
-; CHECK-RV32-NEXT:  .LBB61_467: # %else1754
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_468
-; CHECK-RV32-NEXT:    j .LBB61_957
-; CHECK-RV32-NEXT:  .LBB61_468: # %else1758
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_469
-; CHECK-RV32-NEXT:    j .LBB61_958
-; CHECK-RV32-NEXT:  .LBB61_469: # %else1762
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_470
-; CHECK-RV32-NEXT:    j .LBB61_959
-; CHECK-RV32-NEXT:  .LBB61_470: # %else1766
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_471
-; CHECK-RV32-NEXT:    j .LBB61_960
-; CHECK-RV32-NEXT:  .LBB61_471: # %else1770
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_472
-; CHECK-RV32-NEXT:    j .LBB61_961
-; CHECK-RV32-NEXT:  .LBB61_472: # %else1774
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_474
-; CHECK-RV32-NEXT:  .LBB61_473: # %cond.load1777
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 446
-; CHECK-RV32-NEXT:    li a4, 445
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_474: # %else1778
-; CHECK-RV32-NEXT:    slli a3, a2, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_476
-; CHECK-RV32-NEXT:  # %bb.475: # %cond.load1781
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    li a3, 447
-; CHECK-RV32-NEXT:    li a4, 446
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_476: # %else1782
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_477
-; CHECK-RV32-NEXT:    j .LBB61_962
-; CHECK-RV32-NEXT:  .LBB61_477: # %else1786
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_478
-; CHECK-RV32-NEXT:    j .LBB61_963
-; CHECK-RV32-NEXT:  .LBB61_478: # %else1790
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_479
-; CHECK-RV32-NEXT:    j .LBB61_964
-; CHECK-RV32-NEXT:  .LBB61_479: # %else1794
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_480
-; CHECK-RV32-NEXT:    j .LBB61_965
-; CHECK-RV32-NEXT:  .LBB61_480: # %else1798
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_481
-; CHECK-RV32-NEXT:    j .LBB61_966
-; CHECK-RV32-NEXT:  .LBB61_481: # %else1802
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_482
-; CHECK-RV32-NEXT:    j .LBB61_967
-; CHECK-RV32-NEXT:  .LBB61_482: # %else1806
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_483
-; CHECK-RV32-NEXT:    j .LBB61_968
-; CHECK-RV32-NEXT:  .LBB61_483: # %else1810
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_484
-; CHECK-RV32-NEXT:    j .LBB61_969
-; CHECK-RV32-NEXT:  .LBB61_484: # %else1814
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_485
-; CHECK-RV32-NEXT:    j .LBB61_970
-; CHECK-RV32-NEXT:  .LBB61_485: # %else1818
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_486
-; CHECK-RV32-NEXT:    j .LBB61_971
-; CHECK-RV32-NEXT:  .LBB61_486: # %else1822
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_487
-; CHECK-RV32-NEXT:    j .LBB61_972
-; CHECK-RV32-NEXT:  .LBB61_487: # %else1826
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_488
-; CHECK-RV32-NEXT:    j .LBB61_973
-; CHECK-RV32-NEXT:  .LBB61_488: # %else1830
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_489
-; CHECK-RV32-NEXT:    j .LBB61_974
-; CHECK-RV32-NEXT:  .LBB61_489: # %else1834
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_490
-; CHECK-RV32-NEXT:    j .LBB61_975
-; CHECK-RV32-NEXT:  .LBB61_490: # %else1838
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_491
-; CHECK-RV32-NEXT:    j .LBB61_976
-; CHECK-RV32-NEXT:  .LBB61_491: # %else1842
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_492
-; CHECK-RV32-NEXT:    j .LBB61_977
-; CHECK-RV32-NEXT:  .LBB61_492: # %else1846
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_493
-; CHECK-RV32-NEXT:    j .LBB61_978
-; CHECK-RV32-NEXT:  .LBB61_493: # %else1850
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_494
-; CHECK-RV32-NEXT:    j .LBB61_979
-; CHECK-RV32-NEXT:  .LBB61_494: # %else1854
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_495
-; CHECK-RV32-NEXT:    j .LBB61_980
-; CHECK-RV32-NEXT:  .LBB61_495: # %else1858
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_496
-; CHECK-RV32-NEXT:    j .LBB61_981
-; CHECK-RV32-NEXT:  .LBB61_496: # %else1862
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_497
-; CHECK-RV32-NEXT:    j .LBB61_982
-; CHECK-RV32-NEXT:  .LBB61_497: # %else1866
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_498
-; CHECK-RV32-NEXT:    j .LBB61_983
-; CHECK-RV32-NEXT:  .LBB61_498: # %else1870
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_499
-; CHECK-RV32-NEXT:    j .LBB61_984
-; CHECK-RV32-NEXT:  .LBB61_499: # %else1874
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_500
-; CHECK-RV32-NEXT:    j .LBB61_985
-; CHECK-RV32-NEXT:  .LBB61_500: # %else1878
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_501
-; CHECK-RV32-NEXT:    j .LBB61_986
-; CHECK-RV32-NEXT:  .LBB61_501: # %else1882
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_502
-; CHECK-RV32-NEXT:    j .LBB61_987
-; CHECK-RV32-NEXT:  .LBB61_502: # %else1886
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_503
-; CHECK-RV32-NEXT:    j .LBB61_988
-; CHECK-RV32-NEXT:  .LBB61_503: # %else1890
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_504
-; CHECK-RV32-NEXT:    j .LBB61_989
-; CHECK-RV32-NEXT:  .LBB61_504: # %else1894
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_505
-; CHECK-RV32-NEXT:    j .LBB61_990
-; CHECK-RV32-NEXT:  .LBB61_505: # %else1898
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_506
-; CHECK-RV32-NEXT:    j .LBB61_991
-; CHECK-RV32-NEXT:  .LBB61_506: # %else1902
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_508
-; CHECK-RV32-NEXT:  .LBB61_507: # %cond.load1905
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 478
-; CHECK-RV32-NEXT:    li a4, 477
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_508: # %else1906
-; CHECK-RV32-NEXT:    slli a2, a3, 1
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_510
-; CHECK-RV32-NEXT:  # %bb.509: # %cond.load1909
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a1
-; CHECK-RV32-NEXT:    li a1, 479
-; CHECK-RV32-NEXT:    li a2, 478
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a2
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:  .LBB61_510: # %else1910
-; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_511
-; CHECK-RV32-NEXT:    j .LBB61_992
-; CHECK-RV32-NEXT:  .LBB61_511: # %else1914
-; CHECK-RV32-NEXT:    andi a2, a1, 1
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_512
-; CHECK-RV32-NEXT:    j .LBB61_993
-; CHECK-RV32-NEXT:  .LBB61_512: # %else1918
-; CHECK-RV32-NEXT:    andi a2, a1, 2
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_513
-; CHECK-RV32-NEXT:    j .LBB61_994
-; CHECK-RV32-NEXT:  .LBB61_513: # %else1922
-; CHECK-RV32-NEXT:    andi a2, a1, 4
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_514
-; CHECK-RV32-NEXT:    j .LBB61_995
-; CHECK-RV32-NEXT:  .LBB61_514: # %else1926
-; CHECK-RV32-NEXT:    andi a2, a1, 8
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_515
-; CHECK-RV32-NEXT:    j .LBB61_996
-; CHECK-RV32-NEXT:  .LBB61_515: # %else1930
-; CHECK-RV32-NEXT:    andi a2, a1, 16
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_516
-; CHECK-RV32-NEXT:    j .LBB61_997
-; CHECK-RV32-NEXT:  .LBB61_516: # %else1934
-; CHECK-RV32-NEXT:    andi a2, a1, 32
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_517
-; CHECK-RV32-NEXT:    j .LBB61_998
-; CHECK-RV32-NEXT:  .LBB61_517: # %else1938
-; CHECK-RV32-NEXT:    andi a2, a1, 64
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_518
-; CHECK-RV32-NEXT:    j .LBB61_999
-; CHECK-RV32-NEXT:  .LBB61_518: # %else1942
-; CHECK-RV32-NEXT:    andi a2, a1, 128
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_519
-; CHECK-RV32-NEXT:    j .LBB61_1000
-; CHECK-RV32-NEXT:  .LBB61_519: # %else1946
-; CHECK-RV32-NEXT:    andi a2, a1, 256
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_520
-; CHECK-RV32-NEXT:    j .LBB61_1001
-; CHECK-RV32-NEXT:  .LBB61_520: # %else1950
-; CHECK-RV32-NEXT:    andi a2, a1, 512
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_521
-; CHECK-RV32-NEXT:    j .LBB61_1002
-; CHECK-RV32-NEXT:  .LBB61_521: # %else1954
-; CHECK-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-RV32-NEXT:    beqz a2, .LBB61_522
-; CHECK-RV32-NEXT:    j .LBB61_1003
-; CHECK-RV32-NEXT:  .LBB61_522: # %else1958
-; CHECK-RV32-NEXT:    slli a2, a1, 20
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_523
-; CHECK-RV32-NEXT:    j .LBB61_1004
-; CHECK-RV32-NEXT:  .LBB61_523: # %else1962
-; CHECK-RV32-NEXT:    slli a2, a1, 19
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_524
-; CHECK-RV32-NEXT:    j .LBB61_1005
-; CHECK-RV32-NEXT:  .LBB61_524: # %else1966
-; CHECK-RV32-NEXT:    slli a2, a1, 18
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_525
-; CHECK-RV32-NEXT:    j .LBB61_1006
-; CHECK-RV32-NEXT:  .LBB61_525: # %else1970
-; CHECK-RV32-NEXT:    slli a2, a1, 17
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_526
-; CHECK-RV32-NEXT:    j .LBB61_1007
-; CHECK-RV32-NEXT:  .LBB61_526: # %else1974
-; CHECK-RV32-NEXT:    slli a2, a1, 16
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_527
-; CHECK-RV32-NEXT:    j .LBB61_1008
-; CHECK-RV32-NEXT:  .LBB61_527: # %else1978
-; CHECK-RV32-NEXT:    slli a2, a1, 15
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_528
-; CHECK-RV32-NEXT:    j .LBB61_1009
-; CHECK-RV32-NEXT:  .LBB61_528: # %else1982
-; CHECK-RV32-NEXT:    slli a2, a1, 14
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_529
-; CHECK-RV32-NEXT:    j .LBB61_1010
-; CHECK-RV32-NEXT:  .LBB61_529: # %else1986
-; CHECK-RV32-NEXT:    slli a2, a1, 13
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_530
-; CHECK-RV32-NEXT:    j .LBB61_1011
-; CHECK-RV32-NEXT:  .LBB61_530: # %else1990
-; CHECK-RV32-NEXT:    slli a2, a1, 12
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_531
-; CHECK-RV32-NEXT:    j .LBB61_1012
-; CHECK-RV32-NEXT:  .LBB61_531: # %else1994
-; CHECK-RV32-NEXT:    slli a2, a1, 11
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_532
-; CHECK-RV32-NEXT:    j .LBB61_1013
-; CHECK-RV32-NEXT:  .LBB61_532: # %else1998
-; CHECK-RV32-NEXT:    slli a2, a1, 10
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_533
-; CHECK-RV32-NEXT:    j .LBB61_1014
-; CHECK-RV32-NEXT:  .LBB61_533: # %else2002
-; CHECK-RV32-NEXT:    slli a2, a1, 9
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_534
-; CHECK-RV32-NEXT:    j .LBB61_1015
-; CHECK-RV32-NEXT:  .LBB61_534: # %else2006
-; CHECK-RV32-NEXT:    slli a2, a1, 8
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_535
-; CHECK-RV32-NEXT:    j .LBB61_1016
-; CHECK-RV32-NEXT:  .LBB61_535: # %else2010
-; CHECK-RV32-NEXT:    slli a2, a1, 7
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_536
-; CHECK-RV32-NEXT:    j .LBB61_1017
-; CHECK-RV32-NEXT:  .LBB61_536: # %else2014
-; CHECK-RV32-NEXT:    slli a2, a1, 6
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_537
-; CHECK-RV32-NEXT:    j .LBB61_1018
-; CHECK-RV32-NEXT:  .LBB61_537: # %else2018
-; CHECK-RV32-NEXT:    slli a2, a1, 5
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_538
-; CHECK-RV32-NEXT:    j .LBB61_1019
-; CHECK-RV32-NEXT:  .LBB61_538: # %else2022
-; CHECK-RV32-NEXT:    slli a2, a1, 4
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_539
-; CHECK-RV32-NEXT:    j .LBB61_1020
-; CHECK-RV32-NEXT:  .LBB61_539: # %else2026
-; CHECK-RV32-NEXT:    slli a2, a1, 3
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_540
-; CHECK-RV32-NEXT:    j .LBB61_1021
-; CHECK-RV32-NEXT:  .LBB61_540: # %else2030
-; CHECK-RV32-NEXT:    slli a2, a1, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_541
-; CHECK-RV32-NEXT:    j .LBB61_1022
-; CHECK-RV32-NEXT:  .LBB61_541: # %else2034
-; CHECK-RV32-NEXT:    slli a2, a1, 1
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_542
-; CHECK-RV32-NEXT:    j .LBB61_1023
-; CHECK-RV32-NEXT:  .LBB61_542: # %else2038
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_543
-; CHECK-RV32-NEXT:    j .LBB61_1024
-; CHECK-RV32-NEXT:  .LBB61_543: # %else2042
-; CHECK-RV32-NEXT:    ret
-; CHECK-RV32-NEXT:  .LBB61_544: # %cond.load
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v8, a1
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a1, a3, 2
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_545
-; CHECK-RV32-NEXT:    j .LBB61_2
-; CHECK-RV32-NEXT:  .LBB61_545: # %cond.load1
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 4
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_546
-; CHECK-RV32-NEXT:    j .LBB61_3
-; CHECK-RV32-NEXT:  .LBB61_546: # %cond.load5
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 8
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_547
-; CHECK-RV32-NEXT:    j .LBB61_4
-; CHECK-RV32-NEXT:  .LBB61_547: # %cond.load9
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 16
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_548
-; CHECK-RV32-NEXT:    j .LBB61_5
-; CHECK-RV32-NEXT:  .LBB61_548: # %cond.load13
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 32
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_549
-; CHECK-RV32-NEXT:    j .LBB61_6
-; CHECK-RV32-NEXT:  .LBB61_549: # %cond.load17
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 64
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_550
-; CHECK-RV32-NEXT:    j .LBB61_7
-; CHECK-RV32-NEXT:  .LBB61_550: # %cond.load21
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 128
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_551
-; CHECK-RV32-NEXT:    j .LBB61_8
-; CHECK-RV32-NEXT:  .LBB61_551: # %cond.load25
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 256
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_552
-; CHECK-RV32-NEXT:    j .LBB61_9
-; CHECK-RV32-NEXT:  .LBB61_552: # %cond.load29
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 512
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_553
-; CHECK-RV32-NEXT:    j .LBB61_10
-; CHECK-RV32-NEXT:  .LBB61_553: # %cond.load33
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-RV32-NEXT:    bnez a1, .LBB61_554
-; CHECK-RV32-NEXT:    j .LBB61_11
-; CHECK-RV32-NEXT:  .LBB61_554: # %cond.load37
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 20
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_555
-; CHECK-RV32-NEXT:    j .LBB61_12
-; CHECK-RV32-NEXT:  .LBB61_555: # %cond.load41
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 19
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_556
-; CHECK-RV32-NEXT:    j .LBB61_13
-; CHECK-RV32-NEXT:  .LBB61_556: # %cond.load45
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 18
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_557
-; CHECK-RV32-NEXT:    j .LBB61_14
-; CHECK-RV32-NEXT:  .LBB61_557: # %cond.load49
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 17
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_558
-; CHECK-RV32-NEXT:    j .LBB61_15
-; CHECK-RV32-NEXT:  .LBB61_558: # %cond.load53
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 16
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_559
-; CHECK-RV32-NEXT:    j .LBB61_16
-; CHECK-RV32-NEXT:  .LBB61_559: # %cond.load57
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 15
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_560
-; CHECK-RV32-NEXT:    j .LBB61_17
-; CHECK-RV32-NEXT:  .LBB61_560: # %cond.load61
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 14
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_561
-; CHECK-RV32-NEXT:    j .LBB61_18
-; CHECK-RV32-NEXT:  .LBB61_561: # %cond.load65
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 13
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_562
-; CHECK-RV32-NEXT:    j .LBB61_19
-; CHECK-RV32-NEXT:  .LBB61_562: # %cond.load69
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 12
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_563
-; CHECK-RV32-NEXT:    j .LBB61_20
-; CHECK-RV32-NEXT:  .LBB61_563: # %cond.load73
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 11
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_564
-; CHECK-RV32-NEXT:    j .LBB61_21
-; CHECK-RV32-NEXT:  .LBB61_564: # %cond.load77
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 10
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_565
-; CHECK-RV32-NEXT:    j .LBB61_22
-; CHECK-RV32-NEXT:  .LBB61_565: # %cond.load81
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 9
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_566
-; CHECK-RV32-NEXT:    j .LBB61_23
-; CHECK-RV32-NEXT:  .LBB61_566: # %cond.load85
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 8
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_567
-; CHECK-RV32-NEXT:    j .LBB61_24
-; CHECK-RV32-NEXT:  .LBB61_567: # %cond.load89
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 7
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_568
-; CHECK-RV32-NEXT:    j .LBB61_25
-; CHECK-RV32-NEXT:  .LBB61_568: # %cond.load93
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 6
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_569
-; CHECK-RV32-NEXT:    j .LBB61_26
-; CHECK-RV32-NEXT:  .LBB61_569: # %cond.load97
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 5
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_570
-; CHECK-RV32-NEXT:    j .LBB61_27
-; CHECK-RV32-NEXT:  .LBB61_570: # %cond.load101
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 4
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_571
-; CHECK-RV32-NEXT:    j .LBB61_28
-; CHECK-RV32-NEXT:  .LBB61_571: # %cond.load105
-; CHECK-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a1, a3, 3
-; CHECK-RV32-NEXT:    bgez a1, .LBB61_1025
-; CHECK-RV32-NEXT:    j .LBB61_29
-; CHECK-RV32-NEXT:  .LBB61_1025: # %cond.load105
-; CHECK-RV32-NEXT:    j .LBB61_30
-; CHECK-RV32-NEXT:  .LBB61_572: # %cond.load121
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 32
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_573
-; CHECK-RV32-NEXT:    j .LBB61_36
-; CHECK-RV32-NEXT:  .LBB61_573: # %cond.load125
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 33
-; CHECK-RV32-NEXT:    li a4, 32
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_574
-; CHECK-RV32-NEXT:    j .LBB61_37
-; CHECK-RV32-NEXT:  .LBB61_574: # %cond.load129
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 34
-; CHECK-RV32-NEXT:    li a4, 33
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_575
-; CHECK-RV32-NEXT:    j .LBB61_38
-; CHECK-RV32-NEXT:  .LBB61_575: # %cond.load133
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 35
-; CHECK-RV32-NEXT:    li a4, 34
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_576
-; CHECK-RV32-NEXT:    j .LBB61_39
-; CHECK-RV32-NEXT:  .LBB61_576: # %cond.load137
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 36
-; CHECK-RV32-NEXT:    li a4, 35
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_577
-; CHECK-RV32-NEXT:    j .LBB61_40
-; CHECK-RV32-NEXT:  .LBB61_577: # %cond.load141
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 37
-; CHECK-RV32-NEXT:    li a4, 36
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_578
-; CHECK-RV32-NEXT:    j .LBB61_41
-; CHECK-RV32-NEXT:  .LBB61_578: # %cond.load145
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 38
-; CHECK-RV32-NEXT:    li a4, 37
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_579
-; CHECK-RV32-NEXT:    j .LBB61_42
-; CHECK-RV32-NEXT:  .LBB61_579: # %cond.load149
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 39
-; CHECK-RV32-NEXT:    li a4, 38
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_580
-; CHECK-RV32-NEXT:    j .LBB61_43
-; CHECK-RV32-NEXT:  .LBB61_580: # %cond.load153
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 40
-; CHECK-RV32-NEXT:    li a4, 39
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_581
-; CHECK-RV32-NEXT:    j .LBB61_44
-; CHECK-RV32-NEXT:  .LBB61_581: # %cond.load157
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 41
-; CHECK-RV32-NEXT:    li a4, 40
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_582
-; CHECK-RV32-NEXT:    j .LBB61_45
-; CHECK-RV32-NEXT:  .LBB61_582: # %cond.load161
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 42
-; CHECK-RV32-NEXT:    li a4, 41
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_583
-; CHECK-RV32-NEXT:    j .LBB61_46
-; CHECK-RV32-NEXT:  .LBB61_583: # %cond.load165
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 43
-; CHECK-RV32-NEXT:    li a4, 42
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_584
-; CHECK-RV32-NEXT:    j .LBB61_47
-; CHECK-RV32-NEXT:  .LBB61_584: # %cond.load169
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 44
-; CHECK-RV32-NEXT:    li a4, 43
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_585
-; CHECK-RV32-NEXT:    j .LBB61_48
-; CHECK-RV32-NEXT:  .LBB61_585: # %cond.load173
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 45
-; CHECK-RV32-NEXT:    li a4, 44
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_586
-; CHECK-RV32-NEXT:    j .LBB61_49
-; CHECK-RV32-NEXT:  .LBB61_586: # %cond.load177
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 46
-; CHECK-RV32-NEXT:    li a4, 45
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_587
-; CHECK-RV32-NEXT:    j .LBB61_50
-; CHECK-RV32-NEXT:  .LBB61_587: # %cond.load181
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 47
-; CHECK-RV32-NEXT:    li a4, 46
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_588
-; CHECK-RV32-NEXT:    j .LBB61_51
-; CHECK-RV32-NEXT:  .LBB61_588: # %cond.load185
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 48
-; CHECK-RV32-NEXT:    li a4, 47
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_589
-; CHECK-RV32-NEXT:    j .LBB61_52
-; CHECK-RV32-NEXT:  .LBB61_589: # %cond.load189
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 49
-; CHECK-RV32-NEXT:    li a4, 48
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_590
-; CHECK-RV32-NEXT:    j .LBB61_53
-; CHECK-RV32-NEXT:  .LBB61_590: # %cond.load193
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 50
-; CHECK-RV32-NEXT:    li a4, 49
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_591
-; CHECK-RV32-NEXT:    j .LBB61_54
-; CHECK-RV32-NEXT:  .LBB61_591: # %cond.load197
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 51
-; CHECK-RV32-NEXT:    li a4, 50
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_592
-; CHECK-RV32-NEXT:    j .LBB61_55
-; CHECK-RV32-NEXT:  .LBB61_592: # %cond.load201
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 52
-; CHECK-RV32-NEXT:    li a4, 51
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_593
-; CHECK-RV32-NEXT:    j .LBB61_56
-; CHECK-RV32-NEXT:  .LBB61_593: # %cond.load205
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 53
-; CHECK-RV32-NEXT:    li a4, 52
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_594
-; CHECK-RV32-NEXT:    j .LBB61_57
-; CHECK-RV32-NEXT:  .LBB61_594: # %cond.load209
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 54
-; CHECK-RV32-NEXT:    li a4, 53
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_595
-; CHECK-RV32-NEXT:    j .LBB61_58
-; CHECK-RV32-NEXT:  .LBB61_595: # %cond.load213
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 55
-; CHECK-RV32-NEXT:    li a4, 54
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_596
-; CHECK-RV32-NEXT:    j .LBB61_59
-; CHECK-RV32-NEXT:  .LBB61_596: # %cond.load217
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 56
-; CHECK-RV32-NEXT:    li a4, 55
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_597
-; CHECK-RV32-NEXT:    j .LBB61_60
-; CHECK-RV32-NEXT:  .LBB61_597: # %cond.load221
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 57
-; CHECK-RV32-NEXT:    li a4, 56
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_598
-; CHECK-RV32-NEXT:    j .LBB61_61
-; CHECK-RV32-NEXT:  .LBB61_598: # %cond.load225
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 58
-; CHECK-RV32-NEXT:    li a4, 57
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_599
-; CHECK-RV32-NEXT:    j .LBB61_62
-; CHECK-RV32-NEXT:  .LBB61_599: # %cond.load229
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 59
-; CHECK-RV32-NEXT:    li a4, 58
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_600
-; CHECK-RV32-NEXT:    j .LBB61_63
-; CHECK-RV32-NEXT:  .LBB61_600: # %cond.load233
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 60
-; CHECK-RV32-NEXT:    li a4, 59
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_601
-; CHECK-RV32-NEXT:    j .LBB61_64
-; CHECK-RV32-NEXT:  .LBB61_601: # %cond.load237
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 61
-; CHECK-RV32-NEXT:    li a4, 60
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1026
-; CHECK-RV32-NEXT:    j .LBB61_65
-; CHECK-RV32-NEXT:  .LBB61_1026: # %cond.load237
-; CHECK-RV32-NEXT:    j .LBB61_66
-; CHECK-RV32-NEXT:  .LBB61_602: # %cond.load249
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 64
-; CHECK-RV32-NEXT:    li a4, 63
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_603
-; CHECK-RV32-NEXT:    j .LBB61_70
-; CHECK-RV32-NEXT:  .LBB61_603: # %cond.load253
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 65
-; CHECK-RV32-NEXT:    li a4, 64
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_604
-; CHECK-RV32-NEXT:    j .LBB61_71
-; CHECK-RV32-NEXT:  .LBB61_604: # %cond.load257
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 66
-; CHECK-RV32-NEXT:    li a4, 65
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_605
-; CHECK-RV32-NEXT:    j .LBB61_72
-; CHECK-RV32-NEXT:  .LBB61_605: # %cond.load261
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 67
-; CHECK-RV32-NEXT:    li a4, 66
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_606
-; CHECK-RV32-NEXT:    j .LBB61_73
-; CHECK-RV32-NEXT:  .LBB61_606: # %cond.load265
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 68
-; CHECK-RV32-NEXT:    li a4, 67
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_607
-; CHECK-RV32-NEXT:    j .LBB61_74
-; CHECK-RV32-NEXT:  .LBB61_607: # %cond.load269
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 69
-; CHECK-RV32-NEXT:    li a4, 68
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_608
-; CHECK-RV32-NEXT:    j .LBB61_75
-; CHECK-RV32-NEXT:  .LBB61_608: # %cond.load273
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 70
-; CHECK-RV32-NEXT:    li a4, 69
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_609
-; CHECK-RV32-NEXT:    j .LBB61_76
-; CHECK-RV32-NEXT:  .LBB61_609: # %cond.load277
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 71
-; CHECK-RV32-NEXT:    li a4, 70
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_610
-; CHECK-RV32-NEXT:    j .LBB61_77
-; CHECK-RV32-NEXT:  .LBB61_610: # %cond.load281
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 72
-; CHECK-RV32-NEXT:    li a4, 71
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_611
-; CHECK-RV32-NEXT:    j .LBB61_78
-; CHECK-RV32-NEXT:  .LBB61_611: # %cond.load285
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 73
-; CHECK-RV32-NEXT:    li a4, 72
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_612
-; CHECK-RV32-NEXT:    j .LBB61_79
-; CHECK-RV32-NEXT:  .LBB61_612: # %cond.load289
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 74
-; CHECK-RV32-NEXT:    li a4, 73
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_613
-; CHECK-RV32-NEXT:    j .LBB61_80
-; CHECK-RV32-NEXT:  .LBB61_613: # %cond.load293
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 75
-; CHECK-RV32-NEXT:    li a4, 74
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_614
-; CHECK-RV32-NEXT:    j .LBB61_81
-; CHECK-RV32-NEXT:  .LBB61_614: # %cond.load297
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 76
-; CHECK-RV32-NEXT:    li a4, 75
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_615
-; CHECK-RV32-NEXT:    j .LBB61_82
-; CHECK-RV32-NEXT:  .LBB61_615: # %cond.load301
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 77
-; CHECK-RV32-NEXT:    li a4, 76
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_616
-; CHECK-RV32-NEXT:    j .LBB61_83
-; CHECK-RV32-NEXT:  .LBB61_616: # %cond.load305
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 78
-; CHECK-RV32-NEXT:    li a4, 77
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_617
-; CHECK-RV32-NEXT:    j .LBB61_84
-; CHECK-RV32-NEXT:  .LBB61_617: # %cond.load309
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 79
-; CHECK-RV32-NEXT:    li a4, 78
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_618
-; CHECK-RV32-NEXT:    j .LBB61_85
-; CHECK-RV32-NEXT:  .LBB61_618: # %cond.load313
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 80
-; CHECK-RV32-NEXT:    li a4, 79
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_619
-; CHECK-RV32-NEXT:    j .LBB61_86
-; CHECK-RV32-NEXT:  .LBB61_619: # %cond.load317
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 81
-; CHECK-RV32-NEXT:    li a4, 80
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_620
-; CHECK-RV32-NEXT:    j .LBB61_87
-; CHECK-RV32-NEXT:  .LBB61_620: # %cond.load321
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 82
-; CHECK-RV32-NEXT:    li a4, 81
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_621
-; CHECK-RV32-NEXT:    j .LBB61_88
-; CHECK-RV32-NEXT:  .LBB61_621: # %cond.load325
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 83
-; CHECK-RV32-NEXT:    li a4, 82
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_622
-; CHECK-RV32-NEXT:    j .LBB61_89
-; CHECK-RV32-NEXT:  .LBB61_622: # %cond.load329
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 84
-; CHECK-RV32-NEXT:    li a4, 83
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_623
-; CHECK-RV32-NEXT:    j .LBB61_90
-; CHECK-RV32-NEXT:  .LBB61_623: # %cond.load333
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 85
-; CHECK-RV32-NEXT:    li a4, 84
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_624
-; CHECK-RV32-NEXT:    j .LBB61_91
-; CHECK-RV32-NEXT:  .LBB61_624: # %cond.load337
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 86
-; CHECK-RV32-NEXT:    li a4, 85
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_625
-; CHECK-RV32-NEXT:    j .LBB61_92
-; CHECK-RV32-NEXT:  .LBB61_625: # %cond.load341
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 87
-; CHECK-RV32-NEXT:    li a4, 86
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_626
-; CHECK-RV32-NEXT:    j .LBB61_93
-; CHECK-RV32-NEXT:  .LBB61_626: # %cond.load345
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 88
-; CHECK-RV32-NEXT:    li a4, 87
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_627
-; CHECK-RV32-NEXT:    j .LBB61_94
-; CHECK-RV32-NEXT:  .LBB61_627: # %cond.load349
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 89
-; CHECK-RV32-NEXT:    li a4, 88
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_628
-; CHECK-RV32-NEXT:    j .LBB61_95
-; CHECK-RV32-NEXT:  .LBB61_628: # %cond.load353
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 90
-; CHECK-RV32-NEXT:    li a4, 89
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_629
-; CHECK-RV32-NEXT:    j .LBB61_96
-; CHECK-RV32-NEXT:  .LBB61_629: # %cond.load357
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 91
-; CHECK-RV32-NEXT:    li a4, 90
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_630
-; CHECK-RV32-NEXT:    j .LBB61_97
-; CHECK-RV32-NEXT:  .LBB61_630: # %cond.load361
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 92
-; CHECK-RV32-NEXT:    li a4, 91
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_631
-; CHECK-RV32-NEXT:    j .LBB61_98
-; CHECK-RV32-NEXT:  .LBB61_631: # %cond.load365
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 93
-; CHECK-RV32-NEXT:    li a4, 92
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1027
-; CHECK-RV32-NEXT:    j .LBB61_99
-; CHECK-RV32-NEXT:  .LBB61_1027: # %cond.load365
-; CHECK-RV32-NEXT:    j .LBB61_100
-; CHECK-RV32-NEXT:  .LBB61_632: # %cond.load377
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 96
-; CHECK-RV32-NEXT:    li a4, 95
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_633
-; CHECK-RV32-NEXT:    j .LBB61_104
-; CHECK-RV32-NEXT:  .LBB61_633: # %cond.load381
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 97
-; CHECK-RV32-NEXT:    li a4, 96
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_634
-; CHECK-RV32-NEXT:    j .LBB61_105
-; CHECK-RV32-NEXT:  .LBB61_634: # %cond.load385
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 98
-; CHECK-RV32-NEXT:    li a4, 97
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_635
-; CHECK-RV32-NEXT:    j .LBB61_106
-; CHECK-RV32-NEXT:  .LBB61_635: # %cond.load389
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 99
-; CHECK-RV32-NEXT:    li a4, 98
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_636
-; CHECK-RV32-NEXT:    j .LBB61_107
-; CHECK-RV32-NEXT:  .LBB61_636: # %cond.load393
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 100
-; CHECK-RV32-NEXT:    li a4, 99
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_637
-; CHECK-RV32-NEXT:    j .LBB61_108
-; CHECK-RV32-NEXT:  .LBB61_637: # %cond.load397
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 101
-; CHECK-RV32-NEXT:    li a4, 100
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_638
-; CHECK-RV32-NEXT:    j .LBB61_109
-; CHECK-RV32-NEXT:  .LBB61_638: # %cond.load401
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 102
-; CHECK-RV32-NEXT:    li a4, 101
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_639
-; CHECK-RV32-NEXT:    j .LBB61_110
-; CHECK-RV32-NEXT:  .LBB61_639: # %cond.load405
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 103
-; CHECK-RV32-NEXT:    li a4, 102
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_640
-; CHECK-RV32-NEXT:    j .LBB61_111
-; CHECK-RV32-NEXT:  .LBB61_640: # %cond.load409
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 104
-; CHECK-RV32-NEXT:    li a4, 103
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_641
-; CHECK-RV32-NEXT:    j .LBB61_112
-; CHECK-RV32-NEXT:  .LBB61_641: # %cond.load413
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 105
-; CHECK-RV32-NEXT:    li a4, 104
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_642
-; CHECK-RV32-NEXT:    j .LBB61_113
-; CHECK-RV32-NEXT:  .LBB61_642: # %cond.load417
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 106
-; CHECK-RV32-NEXT:    li a4, 105
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_643
-; CHECK-RV32-NEXT:    j .LBB61_114
-; CHECK-RV32-NEXT:  .LBB61_643: # %cond.load421
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 107
-; CHECK-RV32-NEXT:    li a4, 106
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_644
-; CHECK-RV32-NEXT:    j .LBB61_115
-; CHECK-RV32-NEXT:  .LBB61_644: # %cond.load425
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 108
-; CHECK-RV32-NEXT:    li a4, 107
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_645
-; CHECK-RV32-NEXT:    j .LBB61_116
-; CHECK-RV32-NEXT:  .LBB61_645: # %cond.load429
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 109
-; CHECK-RV32-NEXT:    li a4, 108
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_646
-; CHECK-RV32-NEXT:    j .LBB61_117
-; CHECK-RV32-NEXT:  .LBB61_646: # %cond.load433
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 110
-; CHECK-RV32-NEXT:    li a4, 109
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_647
-; CHECK-RV32-NEXT:    j .LBB61_118
-; CHECK-RV32-NEXT:  .LBB61_647: # %cond.load437
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 111
-; CHECK-RV32-NEXT:    li a4, 110
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_648
-; CHECK-RV32-NEXT:    j .LBB61_119
-; CHECK-RV32-NEXT:  .LBB61_648: # %cond.load441
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 112
-; CHECK-RV32-NEXT:    li a4, 111
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_649
-; CHECK-RV32-NEXT:    j .LBB61_120
-; CHECK-RV32-NEXT:  .LBB61_649: # %cond.load445
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 113
-; CHECK-RV32-NEXT:    li a4, 112
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_650
-; CHECK-RV32-NEXT:    j .LBB61_121
-; CHECK-RV32-NEXT:  .LBB61_650: # %cond.load449
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 114
-; CHECK-RV32-NEXT:    li a4, 113
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_651
-; CHECK-RV32-NEXT:    j .LBB61_122
-; CHECK-RV32-NEXT:  .LBB61_651: # %cond.load453
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 115
-; CHECK-RV32-NEXT:    li a4, 114
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_652
-; CHECK-RV32-NEXT:    j .LBB61_123
-; CHECK-RV32-NEXT:  .LBB61_652: # %cond.load457
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 116
-; CHECK-RV32-NEXT:    li a4, 115
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_653
-; CHECK-RV32-NEXT:    j .LBB61_124
-; CHECK-RV32-NEXT:  .LBB61_653: # %cond.load461
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 117
-; CHECK-RV32-NEXT:    li a4, 116
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_654
-; CHECK-RV32-NEXT:    j .LBB61_125
-; CHECK-RV32-NEXT:  .LBB61_654: # %cond.load465
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 118
-; CHECK-RV32-NEXT:    li a4, 117
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_655
-; CHECK-RV32-NEXT:    j .LBB61_126
-; CHECK-RV32-NEXT:  .LBB61_655: # %cond.load469
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 119
-; CHECK-RV32-NEXT:    li a4, 118
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_656
-; CHECK-RV32-NEXT:    j .LBB61_127
-; CHECK-RV32-NEXT:  .LBB61_656: # %cond.load473
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 120
-; CHECK-RV32-NEXT:    li a4, 119
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_657
-; CHECK-RV32-NEXT:    j .LBB61_128
-; CHECK-RV32-NEXT:  .LBB61_657: # %cond.load477
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 121
-; CHECK-RV32-NEXT:    li a4, 120
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_658
-; CHECK-RV32-NEXT:    j .LBB61_129
-; CHECK-RV32-NEXT:  .LBB61_658: # %cond.load481
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 122
-; CHECK-RV32-NEXT:    li a4, 121
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_659
-; CHECK-RV32-NEXT:    j .LBB61_130
-; CHECK-RV32-NEXT:  .LBB61_659: # %cond.load485
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 123
-; CHECK-RV32-NEXT:    li a4, 122
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_660
-; CHECK-RV32-NEXT:    j .LBB61_131
-; CHECK-RV32-NEXT:  .LBB61_660: # %cond.load489
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 124
-; CHECK-RV32-NEXT:    li a4, 123
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_661
-; CHECK-RV32-NEXT:    j .LBB61_132
-; CHECK-RV32-NEXT:  .LBB61_661: # %cond.load493
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-RV32-NEXT:    li a3, 125
-; CHECK-RV32-NEXT:    li a4, 124
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1028
-; CHECK-RV32-NEXT:    j .LBB61_133
-; CHECK-RV32-NEXT:  .LBB61_1028: # %cond.load493
-; CHECK-RV32-NEXT:    j .LBB61_134
-; CHECK-RV32-NEXT:  .LBB61_662: # %cond.load505
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 128
-; CHECK-RV32-NEXT:    li a4, 127
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_663
-; CHECK-RV32-NEXT:    j .LBB61_138
-; CHECK-RV32-NEXT:  .LBB61_663: # %cond.load509
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 129
-; CHECK-RV32-NEXT:    li a4, 128
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_664
-; CHECK-RV32-NEXT:    j .LBB61_139
-; CHECK-RV32-NEXT:  .LBB61_664: # %cond.load513
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 130
-; CHECK-RV32-NEXT:    li a4, 129
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_665
-; CHECK-RV32-NEXT:    j .LBB61_140
-; CHECK-RV32-NEXT:  .LBB61_665: # %cond.load517
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 131
-; CHECK-RV32-NEXT:    li a4, 130
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_666
-; CHECK-RV32-NEXT:    j .LBB61_141
-; CHECK-RV32-NEXT:  .LBB61_666: # %cond.load521
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 132
-; CHECK-RV32-NEXT:    li a4, 131
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_667
-; CHECK-RV32-NEXT:    j .LBB61_142
-; CHECK-RV32-NEXT:  .LBB61_667: # %cond.load525
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 133
-; CHECK-RV32-NEXT:    li a4, 132
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_668
-; CHECK-RV32-NEXT:    j .LBB61_143
-; CHECK-RV32-NEXT:  .LBB61_668: # %cond.load529
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 134
-; CHECK-RV32-NEXT:    li a4, 133
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_669
-; CHECK-RV32-NEXT:    j .LBB61_144
-; CHECK-RV32-NEXT:  .LBB61_669: # %cond.load533
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 135
-; CHECK-RV32-NEXT:    li a4, 134
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_670
-; CHECK-RV32-NEXT:    j .LBB61_145
-; CHECK-RV32-NEXT:  .LBB61_670: # %cond.load537
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 136
-; CHECK-RV32-NEXT:    li a4, 135
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_671
-; CHECK-RV32-NEXT:    j .LBB61_146
-; CHECK-RV32-NEXT:  .LBB61_671: # %cond.load541
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 137
-; CHECK-RV32-NEXT:    li a4, 136
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_672
-; CHECK-RV32-NEXT:    j .LBB61_147
-; CHECK-RV32-NEXT:  .LBB61_672: # %cond.load545
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 138
-; CHECK-RV32-NEXT:    li a4, 137
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_673
-; CHECK-RV32-NEXT:    j .LBB61_148
-; CHECK-RV32-NEXT:  .LBB61_673: # %cond.load549
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 139
-; CHECK-RV32-NEXT:    li a4, 138
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_674
-; CHECK-RV32-NEXT:    j .LBB61_149
-; CHECK-RV32-NEXT:  .LBB61_674: # %cond.load553
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 140
-; CHECK-RV32-NEXT:    li a4, 139
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_675
-; CHECK-RV32-NEXT:    j .LBB61_150
-; CHECK-RV32-NEXT:  .LBB61_675: # %cond.load557
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 141
-; CHECK-RV32-NEXT:    li a4, 140
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_676
-; CHECK-RV32-NEXT:    j .LBB61_151
-; CHECK-RV32-NEXT:  .LBB61_676: # %cond.load561
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 142
-; CHECK-RV32-NEXT:    li a4, 141
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_677
-; CHECK-RV32-NEXT:    j .LBB61_152
-; CHECK-RV32-NEXT:  .LBB61_677: # %cond.load565
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 143
-; CHECK-RV32-NEXT:    li a4, 142
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_678
-; CHECK-RV32-NEXT:    j .LBB61_153
-; CHECK-RV32-NEXT:  .LBB61_678: # %cond.load569
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 144
-; CHECK-RV32-NEXT:    li a4, 143
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_679
-; CHECK-RV32-NEXT:    j .LBB61_154
-; CHECK-RV32-NEXT:  .LBB61_679: # %cond.load573
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 145
-; CHECK-RV32-NEXT:    li a4, 144
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_680
-; CHECK-RV32-NEXT:    j .LBB61_155
-; CHECK-RV32-NEXT:  .LBB61_680: # %cond.load577
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 146
-; CHECK-RV32-NEXT:    li a4, 145
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_681
-; CHECK-RV32-NEXT:    j .LBB61_156
-; CHECK-RV32-NEXT:  .LBB61_681: # %cond.load581
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 147
-; CHECK-RV32-NEXT:    li a4, 146
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_682
-; CHECK-RV32-NEXT:    j .LBB61_157
-; CHECK-RV32-NEXT:  .LBB61_682: # %cond.load585
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 148
-; CHECK-RV32-NEXT:    li a4, 147
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_683
-; CHECK-RV32-NEXT:    j .LBB61_158
-; CHECK-RV32-NEXT:  .LBB61_683: # %cond.load589
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 149
-; CHECK-RV32-NEXT:    li a4, 148
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_684
-; CHECK-RV32-NEXT:    j .LBB61_159
-; CHECK-RV32-NEXT:  .LBB61_684: # %cond.load593
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 150
-; CHECK-RV32-NEXT:    li a4, 149
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_685
-; CHECK-RV32-NEXT:    j .LBB61_160
-; CHECK-RV32-NEXT:  .LBB61_685: # %cond.load597
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 151
-; CHECK-RV32-NEXT:    li a4, 150
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_686
-; CHECK-RV32-NEXT:    j .LBB61_161
-; CHECK-RV32-NEXT:  .LBB61_686: # %cond.load601
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 152
-; CHECK-RV32-NEXT:    li a4, 151
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_687
-; CHECK-RV32-NEXT:    j .LBB61_162
-; CHECK-RV32-NEXT:  .LBB61_687: # %cond.load605
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 153
-; CHECK-RV32-NEXT:    li a4, 152
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_688
-; CHECK-RV32-NEXT:    j .LBB61_163
-; CHECK-RV32-NEXT:  .LBB61_688: # %cond.load609
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 154
-; CHECK-RV32-NEXT:    li a4, 153
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_689
-; CHECK-RV32-NEXT:    j .LBB61_164
-; CHECK-RV32-NEXT:  .LBB61_689: # %cond.load613
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 155
-; CHECK-RV32-NEXT:    li a4, 154
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_690
-; CHECK-RV32-NEXT:    j .LBB61_165
-; CHECK-RV32-NEXT:  .LBB61_690: # %cond.load617
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 156
-; CHECK-RV32-NEXT:    li a4, 155
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_691
-; CHECK-RV32-NEXT:    j .LBB61_166
-; CHECK-RV32-NEXT:  .LBB61_691: # %cond.load621
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 157
-; CHECK-RV32-NEXT:    li a4, 156
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1029
-; CHECK-RV32-NEXT:    j .LBB61_167
-; CHECK-RV32-NEXT:  .LBB61_1029: # %cond.load621
-; CHECK-RV32-NEXT:    j .LBB61_168
-; CHECK-RV32-NEXT:  .LBB61_692: # %cond.load633
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 160
-; CHECK-RV32-NEXT:    li a4, 159
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_693
-; CHECK-RV32-NEXT:    j .LBB61_172
-; CHECK-RV32-NEXT:  .LBB61_693: # %cond.load637
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 161
-; CHECK-RV32-NEXT:    li a4, 160
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_694
-; CHECK-RV32-NEXT:    j .LBB61_173
-; CHECK-RV32-NEXT:  .LBB61_694: # %cond.load641
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 162
-; CHECK-RV32-NEXT:    li a4, 161
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_695
-; CHECK-RV32-NEXT:    j .LBB61_174
-; CHECK-RV32-NEXT:  .LBB61_695: # %cond.load645
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 163
-; CHECK-RV32-NEXT:    li a4, 162
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_696
-; CHECK-RV32-NEXT:    j .LBB61_175
-; CHECK-RV32-NEXT:  .LBB61_696: # %cond.load649
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 164
-; CHECK-RV32-NEXT:    li a4, 163
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_697
-; CHECK-RV32-NEXT:    j .LBB61_176
-; CHECK-RV32-NEXT:  .LBB61_697: # %cond.load653
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 165
-; CHECK-RV32-NEXT:    li a4, 164
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_698
-; CHECK-RV32-NEXT:    j .LBB61_177
-; CHECK-RV32-NEXT:  .LBB61_698: # %cond.load657
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 166
-; CHECK-RV32-NEXT:    li a4, 165
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_699
-; CHECK-RV32-NEXT:    j .LBB61_178
-; CHECK-RV32-NEXT:  .LBB61_699: # %cond.load661
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 167
-; CHECK-RV32-NEXT:    li a4, 166
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_700
-; CHECK-RV32-NEXT:    j .LBB61_179
-; CHECK-RV32-NEXT:  .LBB61_700: # %cond.load665
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 168
-; CHECK-RV32-NEXT:    li a4, 167
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_701
-; CHECK-RV32-NEXT:    j .LBB61_180
-; CHECK-RV32-NEXT:  .LBB61_701: # %cond.load669
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 169
-; CHECK-RV32-NEXT:    li a4, 168
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_702
-; CHECK-RV32-NEXT:    j .LBB61_181
-; CHECK-RV32-NEXT:  .LBB61_702: # %cond.load673
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 170
-; CHECK-RV32-NEXT:    li a4, 169
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_703
-; CHECK-RV32-NEXT:    j .LBB61_182
-; CHECK-RV32-NEXT:  .LBB61_703: # %cond.load677
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 171
-; CHECK-RV32-NEXT:    li a4, 170
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_704
-; CHECK-RV32-NEXT:    j .LBB61_183
-; CHECK-RV32-NEXT:  .LBB61_704: # %cond.load681
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 172
-; CHECK-RV32-NEXT:    li a4, 171
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_705
-; CHECK-RV32-NEXT:    j .LBB61_184
-; CHECK-RV32-NEXT:  .LBB61_705: # %cond.load685
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 173
-; CHECK-RV32-NEXT:    li a4, 172
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_706
-; CHECK-RV32-NEXT:    j .LBB61_185
-; CHECK-RV32-NEXT:  .LBB61_706: # %cond.load689
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 174
-; CHECK-RV32-NEXT:    li a4, 173
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_707
-; CHECK-RV32-NEXT:    j .LBB61_186
-; CHECK-RV32-NEXT:  .LBB61_707: # %cond.load693
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 175
-; CHECK-RV32-NEXT:    li a4, 174
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_708
-; CHECK-RV32-NEXT:    j .LBB61_187
-; CHECK-RV32-NEXT:  .LBB61_708: # %cond.load697
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 176
-; CHECK-RV32-NEXT:    li a4, 175
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_709
-; CHECK-RV32-NEXT:    j .LBB61_188
-; CHECK-RV32-NEXT:  .LBB61_709: # %cond.load701
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 177
-; CHECK-RV32-NEXT:    li a4, 176
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_710
-; CHECK-RV32-NEXT:    j .LBB61_189
-; CHECK-RV32-NEXT:  .LBB61_710: # %cond.load705
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 178
-; CHECK-RV32-NEXT:    li a4, 177
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_711
-; CHECK-RV32-NEXT:    j .LBB61_190
-; CHECK-RV32-NEXT:  .LBB61_711: # %cond.load709
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 179
-; CHECK-RV32-NEXT:    li a4, 178
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_712
-; CHECK-RV32-NEXT:    j .LBB61_191
-; CHECK-RV32-NEXT:  .LBB61_712: # %cond.load713
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 180
-; CHECK-RV32-NEXT:    li a4, 179
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_713
-; CHECK-RV32-NEXT:    j .LBB61_192
-; CHECK-RV32-NEXT:  .LBB61_713: # %cond.load717
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 181
-; CHECK-RV32-NEXT:    li a4, 180
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_714
-; CHECK-RV32-NEXT:    j .LBB61_193
-; CHECK-RV32-NEXT:  .LBB61_714: # %cond.load721
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 182
-; CHECK-RV32-NEXT:    li a4, 181
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_715
-; CHECK-RV32-NEXT:    j .LBB61_194
-; CHECK-RV32-NEXT:  .LBB61_715: # %cond.load725
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 183
-; CHECK-RV32-NEXT:    li a4, 182
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_716
-; CHECK-RV32-NEXT:    j .LBB61_195
-; CHECK-RV32-NEXT:  .LBB61_716: # %cond.load729
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 184
-; CHECK-RV32-NEXT:    li a4, 183
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_717
-; CHECK-RV32-NEXT:    j .LBB61_196
-; CHECK-RV32-NEXT:  .LBB61_717: # %cond.load733
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 185
-; CHECK-RV32-NEXT:    li a4, 184
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_718
-; CHECK-RV32-NEXT:    j .LBB61_197
-; CHECK-RV32-NEXT:  .LBB61_718: # %cond.load737
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 186
-; CHECK-RV32-NEXT:    li a4, 185
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_719
-; CHECK-RV32-NEXT:    j .LBB61_198
-; CHECK-RV32-NEXT:  .LBB61_719: # %cond.load741
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 187
-; CHECK-RV32-NEXT:    li a4, 186
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_720
-; CHECK-RV32-NEXT:    j .LBB61_199
-; CHECK-RV32-NEXT:  .LBB61_720: # %cond.load745
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 188
-; CHECK-RV32-NEXT:    li a4, 187
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_721
-; CHECK-RV32-NEXT:    j .LBB61_200
-; CHECK-RV32-NEXT:  .LBB61_721: # %cond.load749
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 189
-; CHECK-RV32-NEXT:    li a4, 188
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1030
-; CHECK-RV32-NEXT:    j .LBB61_201
-; CHECK-RV32-NEXT:  .LBB61_1030: # %cond.load749
-; CHECK-RV32-NEXT:    j .LBB61_202
-; CHECK-RV32-NEXT:  .LBB61_722: # %cond.load761
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 192
-; CHECK-RV32-NEXT:    li a4, 191
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_723
-; CHECK-RV32-NEXT:    j .LBB61_206
-; CHECK-RV32-NEXT:  .LBB61_723: # %cond.load765
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 193
-; CHECK-RV32-NEXT:    li a4, 192
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_724
-; CHECK-RV32-NEXT:    j .LBB61_207
-; CHECK-RV32-NEXT:  .LBB61_724: # %cond.load769
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 194
-; CHECK-RV32-NEXT:    li a4, 193
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_725
-; CHECK-RV32-NEXT:    j .LBB61_208
-; CHECK-RV32-NEXT:  .LBB61_725: # %cond.load773
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 195
-; CHECK-RV32-NEXT:    li a4, 194
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_726
-; CHECK-RV32-NEXT:    j .LBB61_209
-; CHECK-RV32-NEXT:  .LBB61_726: # %cond.load777
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 196
-; CHECK-RV32-NEXT:    li a4, 195
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_727
-; CHECK-RV32-NEXT:    j .LBB61_210
-; CHECK-RV32-NEXT:  .LBB61_727: # %cond.load781
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 197
-; CHECK-RV32-NEXT:    li a4, 196
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_728
-; CHECK-RV32-NEXT:    j .LBB61_211
-; CHECK-RV32-NEXT:  .LBB61_728: # %cond.load785
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 198
-; CHECK-RV32-NEXT:    li a4, 197
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_729
-; CHECK-RV32-NEXT:    j .LBB61_212
-; CHECK-RV32-NEXT:  .LBB61_729: # %cond.load789
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 199
-; CHECK-RV32-NEXT:    li a4, 198
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_730
-; CHECK-RV32-NEXT:    j .LBB61_213
-; CHECK-RV32-NEXT:  .LBB61_730: # %cond.load793
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 200
-; CHECK-RV32-NEXT:    li a4, 199
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_731
-; CHECK-RV32-NEXT:    j .LBB61_214
-; CHECK-RV32-NEXT:  .LBB61_731: # %cond.load797
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 201
-; CHECK-RV32-NEXT:    li a4, 200
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_732
-; CHECK-RV32-NEXT:    j .LBB61_215
-; CHECK-RV32-NEXT:  .LBB61_732: # %cond.load801
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 202
-; CHECK-RV32-NEXT:    li a4, 201
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_733
-; CHECK-RV32-NEXT:    j .LBB61_216
-; CHECK-RV32-NEXT:  .LBB61_733: # %cond.load805
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 203
-; CHECK-RV32-NEXT:    li a4, 202
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_734
-; CHECK-RV32-NEXT:    j .LBB61_217
-; CHECK-RV32-NEXT:  .LBB61_734: # %cond.load809
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 204
-; CHECK-RV32-NEXT:    li a4, 203
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_735
-; CHECK-RV32-NEXT:    j .LBB61_218
-; CHECK-RV32-NEXT:  .LBB61_735: # %cond.load813
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 205
-; CHECK-RV32-NEXT:    li a4, 204
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_736
-; CHECK-RV32-NEXT:    j .LBB61_219
-; CHECK-RV32-NEXT:  .LBB61_736: # %cond.load817
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 206
-; CHECK-RV32-NEXT:    li a4, 205
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_737
-; CHECK-RV32-NEXT:    j .LBB61_220
-; CHECK-RV32-NEXT:  .LBB61_737: # %cond.load821
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 207
-; CHECK-RV32-NEXT:    li a4, 206
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_738
-; CHECK-RV32-NEXT:    j .LBB61_221
-; CHECK-RV32-NEXT:  .LBB61_738: # %cond.load825
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 208
-; CHECK-RV32-NEXT:    li a4, 207
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_739
-; CHECK-RV32-NEXT:    j .LBB61_222
-; CHECK-RV32-NEXT:  .LBB61_739: # %cond.load829
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 209
-; CHECK-RV32-NEXT:    li a4, 208
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_740
-; CHECK-RV32-NEXT:    j .LBB61_223
-; CHECK-RV32-NEXT:  .LBB61_740: # %cond.load833
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 210
-; CHECK-RV32-NEXT:    li a4, 209
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_741
-; CHECK-RV32-NEXT:    j .LBB61_224
-; CHECK-RV32-NEXT:  .LBB61_741: # %cond.load837
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 211
-; CHECK-RV32-NEXT:    li a4, 210
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_742
-; CHECK-RV32-NEXT:    j .LBB61_225
-; CHECK-RV32-NEXT:  .LBB61_742: # %cond.load841
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 212
-; CHECK-RV32-NEXT:    li a4, 211
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_743
-; CHECK-RV32-NEXT:    j .LBB61_226
-; CHECK-RV32-NEXT:  .LBB61_743: # %cond.load845
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 213
-; CHECK-RV32-NEXT:    li a4, 212
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_744
-; CHECK-RV32-NEXT:    j .LBB61_227
-; CHECK-RV32-NEXT:  .LBB61_744: # %cond.load849
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 214
-; CHECK-RV32-NEXT:    li a4, 213
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_745
-; CHECK-RV32-NEXT:    j .LBB61_228
-; CHECK-RV32-NEXT:  .LBB61_745: # %cond.load853
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 215
-; CHECK-RV32-NEXT:    li a4, 214
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_746
-; CHECK-RV32-NEXT:    j .LBB61_229
-; CHECK-RV32-NEXT:  .LBB61_746: # %cond.load857
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 216
-; CHECK-RV32-NEXT:    li a4, 215
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_747
-; CHECK-RV32-NEXT:    j .LBB61_230
-; CHECK-RV32-NEXT:  .LBB61_747: # %cond.load861
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 217
-; CHECK-RV32-NEXT:    li a4, 216
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_748
-; CHECK-RV32-NEXT:    j .LBB61_231
-; CHECK-RV32-NEXT:  .LBB61_748: # %cond.load865
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 218
-; CHECK-RV32-NEXT:    li a4, 217
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_749
-; CHECK-RV32-NEXT:    j .LBB61_232
-; CHECK-RV32-NEXT:  .LBB61_749: # %cond.load869
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 219
-; CHECK-RV32-NEXT:    li a4, 218
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_750
-; CHECK-RV32-NEXT:    j .LBB61_233
-; CHECK-RV32-NEXT:  .LBB61_750: # %cond.load873
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 220
-; CHECK-RV32-NEXT:    li a4, 219
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_751
-; CHECK-RV32-NEXT:    j .LBB61_234
-; CHECK-RV32-NEXT:  .LBB61_751: # %cond.load877
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 221
-; CHECK-RV32-NEXT:    li a4, 220
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1031
-; CHECK-RV32-NEXT:    j .LBB61_235
-; CHECK-RV32-NEXT:  .LBB61_1031: # %cond.load877
-; CHECK-RV32-NEXT:    j .LBB61_236
-; CHECK-RV32-NEXT:  .LBB61_752: # %cond.load889
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 224
-; CHECK-RV32-NEXT:    li a4, 223
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_753
-; CHECK-RV32-NEXT:    j .LBB61_240
-; CHECK-RV32-NEXT:  .LBB61_753: # %cond.load893
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 225
-; CHECK-RV32-NEXT:    li a4, 224
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_754
-; CHECK-RV32-NEXT:    j .LBB61_241
-; CHECK-RV32-NEXT:  .LBB61_754: # %cond.load897
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 226
-; CHECK-RV32-NEXT:    li a4, 225
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_755
-; CHECK-RV32-NEXT:    j .LBB61_242
-; CHECK-RV32-NEXT:  .LBB61_755: # %cond.load901
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 227
-; CHECK-RV32-NEXT:    li a4, 226
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_756
-; CHECK-RV32-NEXT:    j .LBB61_243
-; CHECK-RV32-NEXT:  .LBB61_756: # %cond.load905
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 228
-; CHECK-RV32-NEXT:    li a4, 227
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_757
-; CHECK-RV32-NEXT:    j .LBB61_244
-; CHECK-RV32-NEXT:  .LBB61_757: # %cond.load909
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 229
-; CHECK-RV32-NEXT:    li a4, 228
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_758
-; CHECK-RV32-NEXT:    j .LBB61_245
-; CHECK-RV32-NEXT:  .LBB61_758: # %cond.load913
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 230
-; CHECK-RV32-NEXT:    li a4, 229
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_759
-; CHECK-RV32-NEXT:    j .LBB61_246
-; CHECK-RV32-NEXT:  .LBB61_759: # %cond.load917
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 231
-; CHECK-RV32-NEXT:    li a4, 230
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_760
-; CHECK-RV32-NEXT:    j .LBB61_247
-; CHECK-RV32-NEXT:  .LBB61_760: # %cond.load921
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 232
-; CHECK-RV32-NEXT:    li a4, 231
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_761
-; CHECK-RV32-NEXT:    j .LBB61_248
-; CHECK-RV32-NEXT:  .LBB61_761: # %cond.load925
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 233
-; CHECK-RV32-NEXT:    li a4, 232
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_762
-; CHECK-RV32-NEXT:    j .LBB61_249
-; CHECK-RV32-NEXT:  .LBB61_762: # %cond.load929
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 234
-; CHECK-RV32-NEXT:    li a4, 233
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_763
-; CHECK-RV32-NEXT:    j .LBB61_250
-; CHECK-RV32-NEXT:  .LBB61_763: # %cond.load933
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 235
-; CHECK-RV32-NEXT:    li a4, 234
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_764
-; CHECK-RV32-NEXT:    j .LBB61_251
-; CHECK-RV32-NEXT:  .LBB61_764: # %cond.load937
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 236
-; CHECK-RV32-NEXT:    li a4, 235
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_765
-; CHECK-RV32-NEXT:    j .LBB61_252
-; CHECK-RV32-NEXT:  .LBB61_765: # %cond.load941
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 237
-; CHECK-RV32-NEXT:    li a4, 236
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_766
-; CHECK-RV32-NEXT:    j .LBB61_253
-; CHECK-RV32-NEXT:  .LBB61_766: # %cond.load945
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 238
-; CHECK-RV32-NEXT:    li a4, 237
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_767
-; CHECK-RV32-NEXT:    j .LBB61_254
-; CHECK-RV32-NEXT:  .LBB61_767: # %cond.load949
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 239
-; CHECK-RV32-NEXT:    li a4, 238
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_768
-; CHECK-RV32-NEXT:    j .LBB61_255
-; CHECK-RV32-NEXT:  .LBB61_768: # %cond.load953
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 240
-; CHECK-RV32-NEXT:    li a4, 239
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_769
-; CHECK-RV32-NEXT:    j .LBB61_256
-; CHECK-RV32-NEXT:  .LBB61_769: # %cond.load957
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 241
-; CHECK-RV32-NEXT:    li a4, 240
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_770
-; CHECK-RV32-NEXT:    j .LBB61_257
-; CHECK-RV32-NEXT:  .LBB61_770: # %cond.load961
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 242
-; CHECK-RV32-NEXT:    li a4, 241
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_771
-; CHECK-RV32-NEXT:    j .LBB61_258
-; CHECK-RV32-NEXT:  .LBB61_771: # %cond.load965
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 243
-; CHECK-RV32-NEXT:    li a4, 242
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_772
-; CHECK-RV32-NEXT:    j .LBB61_259
-; CHECK-RV32-NEXT:  .LBB61_772: # %cond.load969
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 244
-; CHECK-RV32-NEXT:    li a4, 243
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_773
-; CHECK-RV32-NEXT:    j .LBB61_260
-; CHECK-RV32-NEXT:  .LBB61_773: # %cond.load973
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 245
-; CHECK-RV32-NEXT:    li a4, 244
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_774
-; CHECK-RV32-NEXT:    j .LBB61_261
-; CHECK-RV32-NEXT:  .LBB61_774: # %cond.load977
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 246
-; CHECK-RV32-NEXT:    li a4, 245
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_775
-; CHECK-RV32-NEXT:    j .LBB61_262
-; CHECK-RV32-NEXT:  .LBB61_775: # %cond.load981
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 247
-; CHECK-RV32-NEXT:    li a4, 246
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_776
-; CHECK-RV32-NEXT:    j .LBB61_263
-; CHECK-RV32-NEXT:  .LBB61_776: # %cond.load985
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 248
-; CHECK-RV32-NEXT:    li a4, 247
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_777
-; CHECK-RV32-NEXT:    j .LBB61_264
-; CHECK-RV32-NEXT:  .LBB61_777: # %cond.load989
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 249
-; CHECK-RV32-NEXT:    li a4, 248
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_778
-; CHECK-RV32-NEXT:    j .LBB61_265
-; CHECK-RV32-NEXT:  .LBB61_778: # %cond.load993
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 250
-; CHECK-RV32-NEXT:    li a4, 249
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_779
-; CHECK-RV32-NEXT:    j .LBB61_266
-; CHECK-RV32-NEXT:  .LBB61_779: # %cond.load997
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 251
-; CHECK-RV32-NEXT:    li a4, 250
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_780
-; CHECK-RV32-NEXT:    j .LBB61_267
-; CHECK-RV32-NEXT:  .LBB61_780: # %cond.load1001
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 252
-; CHECK-RV32-NEXT:    li a4, 251
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_781
-; CHECK-RV32-NEXT:    j .LBB61_268
-; CHECK-RV32-NEXT:  .LBB61_781: # %cond.load1005
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a3, 253
-; CHECK-RV32-NEXT:    li a4, 252
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1032
-; CHECK-RV32-NEXT:    j .LBB61_269
-; CHECK-RV32-NEXT:  .LBB61_1032: # %cond.load1005
-; CHECK-RV32-NEXT:    j .LBB61_270
-; CHECK-RV32-NEXT:  .LBB61_782: # %cond.load1017
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-RV32-NEXT:    li a2, 256
-; CHECK-RV32-NEXT:    li a4, 255
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_783
-; CHECK-RV32-NEXT:    j .LBB61_274
-; CHECK-RV32-NEXT:  .LBB61_783: # %cond.load1021
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 257
-; CHECK-RV32-NEXT:    li a4, 256
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_784
-; CHECK-RV32-NEXT:    j .LBB61_275
-; CHECK-RV32-NEXT:  .LBB61_784: # %cond.load1025
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 258
-; CHECK-RV32-NEXT:    li a4, 257
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_785
-; CHECK-RV32-NEXT:    j .LBB61_276
-; CHECK-RV32-NEXT:  .LBB61_785: # %cond.load1029
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 259
-; CHECK-RV32-NEXT:    li a4, 258
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_786
-; CHECK-RV32-NEXT:    j .LBB61_277
-; CHECK-RV32-NEXT:  .LBB61_786: # %cond.load1033
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 260
-; CHECK-RV32-NEXT:    li a4, 259
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_787
-; CHECK-RV32-NEXT:    j .LBB61_278
-; CHECK-RV32-NEXT:  .LBB61_787: # %cond.load1037
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 261
-; CHECK-RV32-NEXT:    li a4, 260
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_788
-; CHECK-RV32-NEXT:    j .LBB61_279
-; CHECK-RV32-NEXT:  .LBB61_788: # %cond.load1041
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 262
-; CHECK-RV32-NEXT:    li a4, 261
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_789
-; CHECK-RV32-NEXT:    j .LBB61_280
-; CHECK-RV32-NEXT:  .LBB61_789: # %cond.load1045
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 263
-; CHECK-RV32-NEXT:    li a4, 262
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_790
-; CHECK-RV32-NEXT:    j .LBB61_281
-; CHECK-RV32-NEXT:  .LBB61_790: # %cond.load1049
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 264
-; CHECK-RV32-NEXT:    li a4, 263
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_791
-; CHECK-RV32-NEXT:    j .LBB61_282
-; CHECK-RV32-NEXT:  .LBB61_791: # %cond.load1053
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 265
-; CHECK-RV32-NEXT:    li a4, 264
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_792
-; CHECK-RV32-NEXT:    j .LBB61_283
-; CHECK-RV32-NEXT:  .LBB61_792: # %cond.load1057
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 266
-; CHECK-RV32-NEXT:    li a4, 265
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_793
-; CHECK-RV32-NEXT:    j .LBB61_284
-; CHECK-RV32-NEXT:  .LBB61_793: # %cond.load1061
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 267
-; CHECK-RV32-NEXT:    li a4, 266
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_794
-; CHECK-RV32-NEXT:    j .LBB61_285
-; CHECK-RV32-NEXT:  .LBB61_794: # %cond.load1065
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 268
-; CHECK-RV32-NEXT:    li a4, 267
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_795
-; CHECK-RV32-NEXT:    j .LBB61_286
-; CHECK-RV32-NEXT:  .LBB61_795: # %cond.load1069
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 269
-; CHECK-RV32-NEXT:    li a4, 268
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_796
-; CHECK-RV32-NEXT:    j .LBB61_287
-; CHECK-RV32-NEXT:  .LBB61_796: # %cond.load1073
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 270
-; CHECK-RV32-NEXT:    li a4, 269
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_797
-; CHECK-RV32-NEXT:    j .LBB61_288
-; CHECK-RV32-NEXT:  .LBB61_797: # %cond.load1077
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 271
-; CHECK-RV32-NEXT:    li a4, 270
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_798
-; CHECK-RV32-NEXT:    j .LBB61_289
-; CHECK-RV32-NEXT:  .LBB61_798: # %cond.load1081
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 272
-; CHECK-RV32-NEXT:    li a4, 271
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_799
-; CHECK-RV32-NEXT:    j .LBB61_290
-; CHECK-RV32-NEXT:  .LBB61_799: # %cond.load1085
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 273
-; CHECK-RV32-NEXT:    li a4, 272
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_800
-; CHECK-RV32-NEXT:    j .LBB61_291
-; CHECK-RV32-NEXT:  .LBB61_800: # %cond.load1089
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 274
-; CHECK-RV32-NEXT:    li a4, 273
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_801
-; CHECK-RV32-NEXT:    j .LBB61_292
-; CHECK-RV32-NEXT:  .LBB61_801: # %cond.load1093
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 275
-; CHECK-RV32-NEXT:    li a4, 274
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_802
-; CHECK-RV32-NEXT:    j .LBB61_293
-; CHECK-RV32-NEXT:  .LBB61_802: # %cond.load1097
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 276
-; CHECK-RV32-NEXT:    li a4, 275
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_803
-; CHECK-RV32-NEXT:    j .LBB61_294
-; CHECK-RV32-NEXT:  .LBB61_803: # %cond.load1101
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 277
-; CHECK-RV32-NEXT:    li a4, 276
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_804
-; CHECK-RV32-NEXT:    j .LBB61_295
-; CHECK-RV32-NEXT:  .LBB61_804: # %cond.load1105
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 278
-; CHECK-RV32-NEXT:    li a4, 277
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_805
-; CHECK-RV32-NEXT:    j .LBB61_296
-; CHECK-RV32-NEXT:  .LBB61_805: # %cond.load1109
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 279
-; CHECK-RV32-NEXT:    li a4, 278
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_806
-; CHECK-RV32-NEXT:    j .LBB61_297
-; CHECK-RV32-NEXT:  .LBB61_806: # %cond.load1113
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 280
-; CHECK-RV32-NEXT:    li a4, 279
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_807
-; CHECK-RV32-NEXT:    j .LBB61_298
-; CHECK-RV32-NEXT:  .LBB61_807: # %cond.load1117
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 281
-; CHECK-RV32-NEXT:    li a4, 280
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_808
-; CHECK-RV32-NEXT:    j .LBB61_299
-; CHECK-RV32-NEXT:  .LBB61_808: # %cond.load1121
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 282
-; CHECK-RV32-NEXT:    li a4, 281
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_809
-; CHECK-RV32-NEXT:    j .LBB61_300
-; CHECK-RV32-NEXT:  .LBB61_809: # %cond.load1125
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 283
-; CHECK-RV32-NEXT:    li a4, 282
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_810
-; CHECK-RV32-NEXT:    j .LBB61_301
-; CHECK-RV32-NEXT:  .LBB61_810: # %cond.load1129
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 284
-; CHECK-RV32-NEXT:    li a4, 283
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_811
-; CHECK-RV32-NEXT:    j .LBB61_302
-; CHECK-RV32-NEXT:  .LBB61_811: # %cond.load1133
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 285
-; CHECK-RV32-NEXT:    li a4, 284
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1033
-; CHECK-RV32-NEXT:    j .LBB61_303
-; CHECK-RV32-NEXT:  .LBB61_1033: # %cond.load1133
-; CHECK-RV32-NEXT:    j .LBB61_304
-; CHECK-RV32-NEXT:  .LBB61_812: # %cond.load1145
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 288
-; CHECK-RV32-NEXT:    li a4, 287
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_813
-; CHECK-RV32-NEXT:    j .LBB61_308
-; CHECK-RV32-NEXT:  .LBB61_813: # %cond.load1149
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 289
-; CHECK-RV32-NEXT:    li a4, 288
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_814
-; CHECK-RV32-NEXT:    j .LBB61_309
-; CHECK-RV32-NEXT:  .LBB61_814: # %cond.load1153
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 290
-; CHECK-RV32-NEXT:    li a4, 289
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_815
-; CHECK-RV32-NEXT:    j .LBB61_310
-; CHECK-RV32-NEXT:  .LBB61_815: # %cond.load1157
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 291
-; CHECK-RV32-NEXT:    li a4, 290
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_816
-; CHECK-RV32-NEXT:    j .LBB61_311
-; CHECK-RV32-NEXT:  .LBB61_816: # %cond.load1161
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 292
-; CHECK-RV32-NEXT:    li a4, 291
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_817
-; CHECK-RV32-NEXT:    j .LBB61_312
-; CHECK-RV32-NEXT:  .LBB61_817: # %cond.load1165
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 293
-; CHECK-RV32-NEXT:    li a4, 292
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_818
-; CHECK-RV32-NEXT:    j .LBB61_313
-; CHECK-RV32-NEXT:  .LBB61_818: # %cond.load1169
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 294
-; CHECK-RV32-NEXT:    li a4, 293
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_819
-; CHECK-RV32-NEXT:    j .LBB61_314
-; CHECK-RV32-NEXT:  .LBB61_819: # %cond.load1173
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 295
-; CHECK-RV32-NEXT:    li a4, 294
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_820
-; CHECK-RV32-NEXT:    j .LBB61_315
-; CHECK-RV32-NEXT:  .LBB61_820: # %cond.load1177
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 296
-; CHECK-RV32-NEXT:    li a4, 295
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_821
-; CHECK-RV32-NEXT:    j .LBB61_316
-; CHECK-RV32-NEXT:  .LBB61_821: # %cond.load1181
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 297
-; CHECK-RV32-NEXT:    li a4, 296
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_822
-; CHECK-RV32-NEXT:    j .LBB61_317
-; CHECK-RV32-NEXT:  .LBB61_822: # %cond.load1185
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 298
-; CHECK-RV32-NEXT:    li a4, 297
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_823
-; CHECK-RV32-NEXT:    j .LBB61_318
-; CHECK-RV32-NEXT:  .LBB61_823: # %cond.load1189
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 299
-; CHECK-RV32-NEXT:    li a4, 298
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_824
-; CHECK-RV32-NEXT:    j .LBB61_319
-; CHECK-RV32-NEXT:  .LBB61_824: # %cond.load1193
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 300
-; CHECK-RV32-NEXT:    li a4, 299
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_825
-; CHECK-RV32-NEXT:    j .LBB61_320
-; CHECK-RV32-NEXT:  .LBB61_825: # %cond.load1197
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 301
-; CHECK-RV32-NEXT:    li a4, 300
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_826
-; CHECK-RV32-NEXT:    j .LBB61_321
-; CHECK-RV32-NEXT:  .LBB61_826: # %cond.load1201
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 302
-; CHECK-RV32-NEXT:    li a4, 301
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_827
-; CHECK-RV32-NEXT:    j .LBB61_322
-; CHECK-RV32-NEXT:  .LBB61_827: # %cond.load1205
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 303
-; CHECK-RV32-NEXT:    li a4, 302
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_828
-; CHECK-RV32-NEXT:    j .LBB61_323
-; CHECK-RV32-NEXT:  .LBB61_828: # %cond.load1209
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 304
-; CHECK-RV32-NEXT:    li a4, 303
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_829
-; CHECK-RV32-NEXT:    j .LBB61_324
-; CHECK-RV32-NEXT:  .LBB61_829: # %cond.load1213
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 305
-; CHECK-RV32-NEXT:    li a4, 304
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_830
-; CHECK-RV32-NEXT:    j .LBB61_325
-; CHECK-RV32-NEXT:  .LBB61_830: # %cond.load1217
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 306
-; CHECK-RV32-NEXT:    li a4, 305
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_831
-; CHECK-RV32-NEXT:    j .LBB61_326
-; CHECK-RV32-NEXT:  .LBB61_831: # %cond.load1221
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 307
-; CHECK-RV32-NEXT:    li a4, 306
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_832
-; CHECK-RV32-NEXT:    j .LBB61_327
-; CHECK-RV32-NEXT:  .LBB61_832: # %cond.load1225
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 308
-; CHECK-RV32-NEXT:    li a4, 307
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_833
-; CHECK-RV32-NEXT:    j .LBB61_328
-; CHECK-RV32-NEXT:  .LBB61_833: # %cond.load1229
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 309
-; CHECK-RV32-NEXT:    li a4, 308
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_834
-; CHECK-RV32-NEXT:    j .LBB61_329
-; CHECK-RV32-NEXT:  .LBB61_834: # %cond.load1233
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 310
-; CHECK-RV32-NEXT:    li a4, 309
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_835
-; CHECK-RV32-NEXT:    j .LBB61_330
-; CHECK-RV32-NEXT:  .LBB61_835: # %cond.load1237
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 311
-; CHECK-RV32-NEXT:    li a4, 310
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_836
-; CHECK-RV32-NEXT:    j .LBB61_331
-; CHECK-RV32-NEXT:  .LBB61_836: # %cond.load1241
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 312
-; CHECK-RV32-NEXT:    li a4, 311
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_837
-; CHECK-RV32-NEXT:    j .LBB61_332
-; CHECK-RV32-NEXT:  .LBB61_837: # %cond.load1245
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 313
-; CHECK-RV32-NEXT:    li a4, 312
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_838
-; CHECK-RV32-NEXT:    j .LBB61_333
-; CHECK-RV32-NEXT:  .LBB61_838: # %cond.load1249
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 314
-; CHECK-RV32-NEXT:    li a4, 313
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_839
-; CHECK-RV32-NEXT:    j .LBB61_334
-; CHECK-RV32-NEXT:  .LBB61_839: # %cond.load1253
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 315
-; CHECK-RV32-NEXT:    li a4, 314
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_840
-; CHECK-RV32-NEXT:    j .LBB61_335
-; CHECK-RV32-NEXT:  .LBB61_840: # %cond.load1257
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 316
-; CHECK-RV32-NEXT:    li a4, 315
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_841
-; CHECK-RV32-NEXT:    j .LBB61_336
-; CHECK-RV32-NEXT:  .LBB61_841: # %cond.load1261
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 317
-; CHECK-RV32-NEXT:    li a4, 316
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1034
-; CHECK-RV32-NEXT:    j .LBB61_337
-; CHECK-RV32-NEXT:  .LBB61_1034: # %cond.load1261
-; CHECK-RV32-NEXT:    j .LBB61_338
-; CHECK-RV32-NEXT:  .LBB61_842: # %cond.load1273
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 320
-; CHECK-RV32-NEXT:    li a4, 319
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_843
-; CHECK-RV32-NEXT:    j .LBB61_342
-; CHECK-RV32-NEXT:  .LBB61_843: # %cond.load1277
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 321
-; CHECK-RV32-NEXT:    li a4, 320
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_844
-; CHECK-RV32-NEXT:    j .LBB61_343
-; CHECK-RV32-NEXT:  .LBB61_844: # %cond.load1281
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 322
-; CHECK-RV32-NEXT:    li a4, 321
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_845
-; CHECK-RV32-NEXT:    j .LBB61_344
-; CHECK-RV32-NEXT:  .LBB61_845: # %cond.load1285
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 323
-; CHECK-RV32-NEXT:    li a4, 322
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_846
-; CHECK-RV32-NEXT:    j .LBB61_345
-; CHECK-RV32-NEXT:  .LBB61_846: # %cond.load1289
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 324
-; CHECK-RV32-NEXT:    li a4, 323
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_847
-; CHECK-RV32-NEXT:    j .LBB61_346
-; CHECK-RV32-NEXT:  .LBB61_847: # %cond.load1293
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 325
-; CHECK-RV32-NEXT:    li a4, 324
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_848
-; CHECK-RV32-NEXT:    j .LBB61_347
-; CHECK-RV32-NEXT:  .LBB61_848: # %cond.load1297
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 326
-; CHECK-RV32-NEXT:    li a4, 325
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_849
-; CHECK-RV32-NEXT:    j .LBB61_348
-; CHECK-RV32-NEXT:  .LBB61_849: # %cond.load1301
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 327
-; CHECK-RV32-NEXT:    li a4, 326
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_850
-; CHECK-RV32-NEXT:    j .LBB61_349
-; CHECK-RV32-NEXT:  .LBB61_850: # %cond.load1305
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 328
-; CHECK-RV32-NEXT:    li a4, 327
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_851
-; CHECK-RV32-NEXT:    j .LBB61_350
-; CHECK-RV32-NEXT:  .LBB61_851: # %cond.load1309
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 329
-; CHECK-RV32-NEXT:    li a4, 328
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_852
-; CHECK-RV32-NEXT:    j .LBB61_351
-; CHECK-RV32-NEXT:  .LBB61_852: # %cond.load1313
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 330
-; CHECK-RV32-NEXT:    li a4, 329
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_853
-; CHECK-RV32-NEXT:    j .LBB61_352
-; CHECK-RV32-NEXT:  .LBB61_853: # %cond.load1317
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 331
-; CHECK-RV32-NEXT:    li a4, 330
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_854
-; CHECK-RV32-NEXT:    j .LBB61_353
-; CHECK-RV32-NEXT:  .LBB61_854: # %cond.load1321
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 332
-; CHECK-RV32-NEXT:    li a4, 331
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_855
-; CHECK-RV32-NEXT:    j .LBB61_354
-; CHECK-RV32-NEXT:  .LBB61_855: # %cond.load1325
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 333
-; CHECK-RV32-NEXT:    li a4, 332
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_856
-; CHECK-RV32-NEXT:    j .LBB61_355
-; CHECK-RV32-NEXT:  .LBB61_856: # %cond.load1329
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 334
-; CHECK-RV32-NEXT:    li a4, 333
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_857
-; CHECK-RV32-NEXT:    j .LBB61_356
-; CHECK-RV32-NEXT:  .LBB61_857: # %cond.load1333
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 335
-; CHECK-RV32-NEXT:    li a4, 334
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_858
-; CHECK-RV32-NEXT:    j .LBB61_357
-; CHECK-RV32-NEXT:  .LBB61_858: # %cond.load1337
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 336
-; CHECK-RV32-NEXT:    li a4, 335
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_859
-; CHECK-RV32-NEXT:    j .LBB61_358
-; CHECK-RV32-NEXT:  .LBB61_859: # %cond.load1341
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 337
-; CHECK-RV32-NEXT:    li a4, 336
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_860
-; CHECK-RV32-NEXT:    j .LBB61_359
-; CHECK-RV32-NEXT:  .LBB61_860: # %cond.load1345
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 338
-; CHECK-RV32-NEXT:    li a4, 337
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_861
-; CHECK-RV32-NEXT:    j .LBB61_360
-; CHECK-RV32-NEXT:  .LBB61_861: # %cond.load1349
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 339
-; CHECK-RV32-NEXT:    li a4, 338
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_862
-; CHECK-RV32-NEXT:    j .LBB61_361
-; CHECK-RV32-NEXT:  .LBB61_862: # %cond.load1353
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 340
-; CHECK-RV32-NEXT:    li a4, 339
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_863
-; CHECK-RV32-NEXT:    j .LBB61_362
-; CHECK-RV32-NEXT:  .LBB61_863: # %cond.load1357
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 341
-; CHECK-RV32-NEXT:    li a4, 340
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_864
-; CHECK-RV32-NEXT:    j .LBB61_363
-; CHECK-RV32-NEXT:  .LBB61_864: # %cond.load1361
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 342
-; CHECK-RV32-NEXT:    li a4, 341
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_865
-; CHECK-RV32-NEXT:    j .LBB61_364
-; CHECK-RV32-NEXT:  .LBB61_865: # %cond.load1365
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 343
-; CHECK-RV32-NEXT:    li a4, 342
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_866
-; CHECK-RV32-NEXT:    j .LBB61_365
-; CHECK-RV32-NEXT:  .LBB61_866: # %cond.load1369
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 344
-; CHECK-RV32-NEXT:    li a4, 343
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_867
-; CHECK-RV32-NEXT:    j .LBB61_366
-; CHECK-RV32-NEXT:  .LBB61_867: # %cond.load1373
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 345
-; CHECK-RV32-NEXT:    li a4, 344
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_868
-; CHECK-RV32-NEXT:    j .LBB61_367
-; CHECK-RV32-NEXT:  .LBB61_868: # %cond.load1377
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 346
-; CHECK-RV32-NEXT:    li a4, 345
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_869
-; CHECK-RV32-NEXT:    j .LBB61_368
-; CHECK-RV32-NEXT:  .LBB61_869: # %cond.load1381
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 347
-; CHECK-RV32-NEXT:    li a4, 346
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_870
-; CHECK-RV32-NEXT:    j .LBB61_369
-; CHECK-RV32-NEXT:  .LBB61_870: # %cond.load1385
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 348
-; CHECK-RV32-NEXT:    li a4, 347
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_871
-; CHECK-RV32-NEXT:    j .LBB61_370
-; CHECK-RV32-NEXT:  .LBB61_871: # %cond.load1389
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 349
-; CHECK-RV32-NEXT:    li a4, 348
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1035
-; CHECK-RV32-NEXT:    j .LBB61_371
-; CHECK-RV32-NEXT:  .LBB61_1035: # %cond.load1389
-; CHECK-RV32-NEXT:    j .LBB61_372
-; CHECK-RV32-NEXT:  .LBB61_872: # %cond.load1401
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 352
-; CHECK-RV32-NEXT:    li a4, 351
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_873
-; CHECK-RV32-NEXT:    j .LBB61_376
-; CHECK-RV32-NEXT:  .LBB61_873: # %cond.load1405
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 353
-; CHECK-RV32-NEXT:    li a4, 352
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_874
-; CHECK-RV32-NEXT:    j .LBB61_377
-; CHECK-RV32-NEXT:  .LBB61_874: # %cond.load1409
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 354
-; CHECK-RV32-NEXT:    li a4, 353
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_875
-; CHECK-RV32-NEXT:    j .LBB61_378
-; CHECK-RV32-NEXT:  .LBB61_875: # %cond.load1413
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 355
-; CHECK-RV32-NEXT:    li a4, 354
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_876
-; CHECK-RV32-NEXT:    j .LBB61_379
-; CHECK-RV32-NEXT:  .LBB61_876: # %cond.load1417
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 356
-; CHECK-RV32-NEXT:    li a4, 355
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_877
-; CHECK-RV32-NEXT:    j .LBB61_380
-; CHECK-RV32-NEXT:  .LBB61_877: # %cond.load1421
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 357
-; CHECK-RV32-NEXT:    li a4, 356
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_878
-; CHECK-RV32-NEXT:    j .LBB61_381
-; CHECK-RV32-NEXT:  .LBB61_878: # %cond.load1425
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 358
-; CHECK-RV32-NEXT:    li a4, 357
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_879
-; CHECK-RV32-NEXT:    j .LBB61_382
-; CHECK-RV32-NEXT:  .LBB61_879: # %cond.load1429
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 359
-; CHECK-RV32-NEXT:    li a4, 358
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_880
-; CHECK-RV32-NEXT:    j .LBB61_383
-; CHECK-RV32-NEXT:  .LBB61_880: # %cond.load1433
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 360
-; CHECK-RV32-NEXT:    li a4, 359
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_881
-; CHECK-RV32-NEXT:    j .LBB61_384
-; CHECK-RV32-NEXT:  .LBB61_881: # %cond.load1437
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 361
-; CHECK-RV32-NEXT:    li a4, 360
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_882
-; CHECK-RV32-NEXT:    j .LBB61_385
-; CHECK-RV32-NEXT:  .LBB61_882: # %cond.load1441
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 362
-; CHECK-RV32-NEXT:    li a4, 361
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_883
-; CHECK-RV32-NEXT:    j .LBB61_386
-; CHECK-RV32-NEXT:  .LBB61_883: # %cond.load1445
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 363
-; CHECK-RV32-NEXT:    li a4, 362
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_884
-; CHECK-RV32-NEXT:    j .LBB61_387
-; CHECK-RV32-NEXT:  .LBB61_884: # %cond.load1449
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 364
-; CHECK-RV32-NEXT:    li a4, 363
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_885
-; CHECK-RV32-NEXT:    j .LBB61_388
-; CHECK-RV32-NEXT:  .LBB61_885: # %cond.load1453
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 365
-; CHECK-RV32-NEXT:    li a4, 364
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_886
-; CHECK-RV32-NEXT:    j .LBB61_389
-; CHECK-RV32-NEXT:  .LBB61_886: # %cond.load1457
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 366
-; CHECK-RV32-NEXT:    li a4, 365
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_887
-; CHECK-RV32-NEXT:    j .LBB61_390
-; CHECK-RV32-NEXT:  .LBB61_887: # %cond.load1461
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 367
-; CHECK-RV32-NEXT:    li a4, 366
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_888
-; CHECK-RV32-NEXT:    j .LBB61_391
-; CHECK-RV32-NEXT:  .LBB61_888: # %cond.load1465
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 368
-; CHECK-RV32-NEXT:    li a4, 367
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_889
-; CHECK-RV32-NEXT:    j .LBB61_392
-; CHECK-RV32-NEXT:  .LBB61_889: # %cond.load1469
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 369
-; CHECK-RV32-NEXT:    li a4, 368
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_890
-; CHECK-RV32-NEXT:    j .LBB61_393
-; CHECK-RV32-NEXT:  .LBB61_890: # %cond.load1473
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 370
-; CHECK-RV32-NEXT:    li a4, 369
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_891
-; CHECK-RV32-NEXT:    j .LBB61_394
-; CHECK-RV32-NEXT:  .LBB61_891: # %cond.load1477
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 371
-; CHECK-RV32-NEXT:    li a4, 370
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_892
-; CHECK-RV32-NEXT:    j .LBB61_395
-; CHECK-RV32-NEXT:  .LBB61_892: # %cond.load1481
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 372
-; CHECK-RV32-NEXT:    li a4, 371
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_893
-; CHECK-RV32-NEXT:    j .LBB61_396
-; CHECK-RV32-NEXT:  .LBB61_893: # %cond.load1485
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 373
-; CHECK-RV32-NEXT:    li a4, 372
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_894
-; CHECK-RV32-NEXT:    j .LBB61_397
-; CHECK-RV32-NEXT:  .LBB61_894: # %cond.load1489
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 374
-; CHECK-RV32-NEXT:    li a4, 373
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_895
-; CHECK-RV32-NEXT:    j .LBB61_398
-; CHECK-RV32-NEXT:  .LBB61_895: # %cond.load1493
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 375
-; CHECK-RV32-NEXT:    li a4, 374
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_896
-; CHECK-RV32-NEXT:    j .LBB61_399
-; CHECK-RV32-NEXT:  .LBB61_896: # %cond.load1497
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 376
-; CHECK-RV32-NEXT:    li a4, 375
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_897
-; CHECK-RV32-NEXT:    j .LBB61_400
-; CHECK-RV32-NEXT:  .LBB61_897: # %cond.load1501
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 377
-; CHECK-RV32-NEXT:    li a4, 376
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_898
-; CHECK-RV32-NEXT:    j .LBB61_401
-; CHECK-RV32-NEXT:  .LBB61_898: # %cond.load1505
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 378
-; CHECK-RV32-NEXT:    li a4, 377
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_899
-; CHECK-RV32-NEXT:    j .LBB61_402
-; CHECK-RV32-NEXT:  .LBB61_899: # %cond.load1509
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 379
-; CHECK-RV32-NEXT:    li a4, 378
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_900
-; CHECK-RV32-NEXT:    j .LBB61_403
-; CHECK-RV32-NEXT:  .LBB61_900: # %cond.load1513
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 380
-; CHECK-RV32-NEXT:    li a4, 379
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_901
-; CHECK-RV32-NEXT:    j .LBB61_404
-; CHECK-RV32-NEXT:  .LBB61_901: # %cond.load1517
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 381
-; CHECK-RV32-NEXT:    li a4, 380
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1036
-; CHECK-RV32-NEXT:    j .LBB61_405
-; CHECK-RV32-NEXT:  .LBB61_1036: # %cond.load1517
-; CHECK-RV32-NEXT:    j .LBB61_406
-; CHECK-RV32-NEXT:  .LBB61_902: # %cond.load1529
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 384
-; CHECK-RV32-NEXT:    li a4, 383
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_903
-; CHECK-RV32-NEXT:    j .LBB61_410
-; CHECK-RV32-NEXT:  .LBB61_903: # %cond.load1533
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 385
-; CHECK-RV32-NEXT:    li a4, 384
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_904
-; CHECK-RV32-NEXT:    j .LBB61_411
-; CHECK-RV32-NEXT:  .LBB61_904: # %cond.load1537
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 386
-; CHECK-RV32-NEXT:    li a4, 385
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_905
-; CHECK-RV32-NEXT:    j .LBB61_412
-; CHECK-RV32-NEXT:  .LBB61_905: # %cond.load1541
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 387
-; CHECK-RV32-NEXT:    li a4, 386
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_906
-; CHECK-RV32-NEXT:    j .LBB61_413
-; CHECK-RV32-NEXT:  .LBB61_906: # %cond.load1545
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 388
-; CHECK-RV32-NEXT:    li a4, 387
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_907
-; CHECK-RV32-NEXT:    j .LBB61_414
-; CHECK-RV32-NEXT:  .LBB61_907: # %cond.load1549
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 389
-; CHECK-RV32-NEXT:    li a4, 388
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_908
-; CHECK-RV32-NEXT:    j .LBB61_415
-; CHECK-RV32-NEXT:  .LBB61_908: # %cond.load1553
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 390
-; CHECK-RV32-NEXT:    li a4, 389
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_909
-; CHECK-RV32-NEXT:    j .LBB61_416
-; CHECK-RV32-NEXT:  .LBB61_909: # %cond.load1557
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 391
-; CHECK-RV32-NEXT:    li a4, 390
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_910
-; CHECK-RV32-NEXT:    j .LBB61_417
-; CHECK-RV32-NEXT:  .LBB61_910: # %cond.load1561
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 392
-; CHECK-RV32-NEXT:    li a4, 391
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_911
-; CHECK-RV32-NEXT:    j .LBB61_418
-; CHECK-RV32-NEXT:  .LBB61_911: # %cond.load1565
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 393
-; CHECK-RV32-NEXT:    li a4, 392
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_912
-; CHECK-RV32-NEXT:    j .LBB61_419
-; CHECK-RV32-NEXT:  .LBB61_912: # %cond.load1569
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 394
-; CHECK-RV32-NEXT:    li a4, 393
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_913
-; CHECK-RV32-NEXT:    j .LBB61_420
-; CHECK-RV32-NEXT:  .LBB61_913: # %cond.load1573
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 395
-; CHECK-RV32-NEXT:    li a4, 394
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_914
-; CHECK-RV32-NEXT:    j .LBB61_421
-; CHECK-RV32-NEXT:  .LBB61_914: # %cond.load1577
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 396
-; CHECK-RV32-NEXT:    li a4, 395
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_915
-; CHECK-RV32-NEXT:    j .LBB61_422
-; CHECK-RV32-NEXT:  .LBB61_915: # %cond.load1581
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 397
-; CHECK-RV32-NEXT:    li a4, 396
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_916
-; CHECK-RV32-NEXT:    j .LBB61_423
-; CHECK-RV32-NEXT:  .LBB61_916: # %cond.load1585
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 398
-; CHECK-RV32-NEXT:    li a4, 397
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_917
-; CHECK-RV32-NEXT:    j .LBB61_424
-; CHECK-RV32-NEXT:  .LBB61_917: # %cond.load1589
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 399
-; CHECK-RV32-NEXT:    li a4, 398
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_918
-; CHECK-RV32-NEXT:    j .LBB61_425
-; CHECK-RV32-NEXT:  .LBB61_918: # %cond.load1593
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 400
-; CHECK-RV32-NEXT:    li a4, 399
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_919
-; CHECK-RV32-NEXT:    j .LBB61_426
-; CHECK-RV32-NEXT:  .LBB61_919: # %cond.load1597
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 401
-; CHECK-RV32-NEXT:    li a4, 400
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_920
-; CHECK-RV32-NEXT:    j .LBB61_427
-; CHECK-RV32-NEXT:  .LBB61_920: # %cond.load1601
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 402
-; CHECK-RV32-NEXT:    li a4, 401
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_921
-; CHECK-RV32-NEXT:    j .LBB61_428
-; CHECK-RV32-NEXT:  .LBB61_921: # %cond.load1605
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 403
-; CHECK-RV32-NEXT:    li a4, 402
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_922
-; CHECK-RV32-NEXT:    j .LBB61_429
-; CHECK-RV32-NEXT:  .LBB61_922: # %cond.load1609
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 404
-; CHECK-RV32-NEXT:    li a4, 403
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_923
-; CHECK-RV32-NEXT:    j .LBB61_430
-; CHECK-RV32-NEXT:  .LBB61_923: # %cond.load1613
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 405
-; CHECK-RV32-NEXT:    li a4, 404
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_924
-; CHECK-RV32-NEXT:    j .LBB61_431
-; CHECK-RV32-NEXT:  .LBB61_924: # %cond.load1617
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 406
-; CHECK-RV32-NEXT:    li a4, 405
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_925
-; CHECK-RV32-NEXT:    j .LBB61_432
-; CHECK-RV32-NEXT:  .LBB61_925: # %cond.load1621
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 407
-; CHECK-RV32-NEXT:    li a4, 406
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_926
-; CHECK-RV32-NEXT:    j .LBB61_433
-; CHECK-RV32-NEXT:  .LBB61_926: # %cond.load1625
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 408
-; CHECK-RV32-NEXT:    li a4, 407
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_927
-; CHECK-RV32-NEXT:    j .LBB61_434
-; CHECK-RV32-NEXT:  .LBB61_927: # %cond.load1629
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 409
-; CHECK-RV32-NEXT:    li a4, 408
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_928
-; CHECK-RV32-NEXT:    j .LBB61_435
-; CHECK-RV32-NEXT:  .LBB61_928: # %cond.load1633
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 410
-; CHECK-RV32-NEXT:    li a4, 409
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_929
-; CHECK-RV32-NEXT:    j .LBB61_436
-; CHECK-RV32-NEXT:  .LBB61_929: # %cond.load1637
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 411
-; CHECK-RV32-NEXT:    li a4, 410
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_930
-; CHECK-RV32-NEXT:    j .LBB61_437
-; CHECK-RV32-NEXT:  .LBB61_930: # %cond.load1641
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 412
-; CHECK-RV32-NEXT:    li a4, 411
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_931
-; CHECK-RV32-NEXT:    j .LBB61_438
-; CHECK-RV32-NEXT:  .LBB61_931: # %cond.load1645
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 413
-; CHECK-RV32-NEXT:    li a4, 412
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1037
-; CHECK-RV32-NEXT:    j .LBB61_439
-; CHECK-RV32-NEXT:  .LBB61_1037: # %cond.load1645
-; CHECK-RV32-NEXT:    j .LBB61_440
-; CHECK-RV32-NEXT:  .LBB61_932: # %cond.load1657
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 416
-; CHECK-RV32-NEXT:    li a4, 415
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_933
-; CHECK-RV32-NEXT:    j .LBB61_444
-; CHECK-RV32-NEXT:  .LBB61_933: # %cond.load1661
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 417
-; CHECK-RV32-NEXT:    li a4, 416
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 2
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_934
-; CHECK-RV32-NEXT:    j .LBB61_445
-; CHECK-RV32-NEXT:  .LBB61_934: # %cond.load1665
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 418
-; CHECK-RV32-NEXT:    li a4, 417
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 4
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_935
-; CHECK-RV32-NEXT:    j .LBB61_446
-; CHECK-RV32-NEXT:  .LBB61_935: # %cond.load1669
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 419
-; CHECK-RV32-NEXT:    li a4, 418
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 8
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_936
-; CHECK-RV32-NEXT:    j .LBB61_447
-; CHECK-RV32-NEXT:  .LBB61_936: # %cond.load1673
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 420
-; CHECK-RV32-NEXT:    li a4, 419
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 16
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_937
-; CHECK-RV32-NEXT:    j .LBB61_448
-; CHECK-RV32-NEXT:  .LBB61_937: # %cond.load1677
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 421
-; CHECK-RV32-NEXT:    li a4, 420
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 32
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_938
-; CHECK-RV32-NEXT:    j .LBB61_449
-; CHECK-RV32-NEXT:  .LBB61_938: # %cond.load1681
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 422
-; CHECK-RV32-NEXT:    li a4, 421
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 64
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_939
-; CHECK-RV32-NEXT:    j .LBB61_450
-; CHECK-RV32-NEXT:  .LBB61_939: # %cond.load1685
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 423
-; CHECK-RV32-NEXT:    li a4, 422
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 128
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_940
-; CHECK-RV32-NEXT:    j .LBB61_451
-; CHECK-RV32-NEXT:  .LBB61_940: # %cond.load1689
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 424
-; CHECK-RV32-NEXT:    li a4, 423
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 256
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_941
-; CHECK-RV32-NEXT:    j .LBB61_452
-; CHECK-RV32-NEXT:  .LBB61_941: # %cond.load1693
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 425
-; CHECK-RV32-NEXT:    li a4, 424
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 512
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_942
-; CHECK-RV32-NEXT:    j .LBB61_453
-; CHECK-RV32-NEXT:  .LBB61_942: # %cond.load1697
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 426
-; CHECK-RV32-NEXT:    li a4, 425
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-RV32-NEXT:    bnez a3, .LBB61_943
-; CHECK-RV32-NEXT:    j .LBB61_454
-; CHECK-RV32-NEXT:  .LBB61_943: # %cond.load1701
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 427
-; CHECK-RV32-NEXT:    li a4, 426
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 20
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_944
-; CHECK-RV32-NEXT:    j .LBB61_455
-; CHECK-RV32-NEXT:  .LBB61_944: # %cond.load1705
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 428
-; CHECK-RV32-NEXT:    li a4, 427
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 19
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_945
-; CHECK-RV32-NEXT:    j .LBB61_456
-; CHECK-RV32-NEXT:  .LBB61_945: # %cond.load1709
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 429
-; CHECK-RV32-NEXT:    li a4, 428
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 18
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_946
-; CHECK-RV32-NEXT:    j .LBB61_457
-; CHECK-RV32-NEXT:  .LBB61_946: # %cond.load1713
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 430
-; CHECK-RV32-NEXT:    li a4, 429
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 17
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_947
-; CHECK-RV32-NEXT:    j .LBB61_458
-; CHECK-RV32-NEXT:  .LBB61_947: # %cond.load1717
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 431
-; CHECK-RV32-NEXT:    li a4, 430
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 16
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_948
-; CHECK-RV32-NEXT:    j .LBB61_459
-; CHECK-RV32-NEXT:  .LBB61_948: # %cond.load1721
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 432
-; CHECK-RV32-NEXT:    li a4, 431
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 15
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_949
-; CHECK-RV32-NEXT:    j .LBB61_460
-; CHECK-RV32-NEXT:  .LBB61_949: # %cond.load1725
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 433
-; CHECK-RV32-NEXT:    li a4, 432
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 14
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_950
-; CHECK-RV32-NEXT:    j .LBB61_461
-; CHECK-RV32-NEXT:  .LBB61_950: # %cond.load1729
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 434
-; CHECK-RV32-NEXT:    li a4, 433
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 13
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_951
-; CHECK-RV32-NEXT:    j .LBB61_462
-; CHECK-RV32-NEXT:  .LBB61_951: # %cond.load1733
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 435
-; CHECK-RV32-NEXT:    li a4, 434
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 12
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_952
-; CHECK-RV32-NEXT:    j .LBB61_463
-; CHECK-RV32-NEXT:  .LBB61_952: # %cond.load1737
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 436
-; CHECK-RV32-NEXT:    li a4, 435
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 11
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_953
-; CHECK-RV32-NEXT:    j .LBB61_464
-; CHECK-RV32-NEXT:  .LBB61_953: # %cond.load1741
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 437
-; CHECK-RV32-NEXT:    li a4, 436
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 10
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_954
-; CHECK-RV32-NEXT:    j .LBB61_465
-; CHECK-RV32-NEXT:  .LBB61_954: # %cond.load1745
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 438
-; CHECK-RV32-NEXT:    li a4, 437
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 9
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_955
-; CHECK-RV32-NEXT:    j .LBB61_466
-; CHECK-RV32-NEXT:  .LBB61_955: # %cond.load1749
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 439
-; CHECK-RV32-NEXT:    li a4, 438
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 8
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_956
-; CHECK-RV32-NEXT:    j .LBB61_467
-; CHECK-RV32-NEXT:  .LBB61_956: # %cond.load1753
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 440
-; CHECK-RV32-NEXT:    li a4, 439
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 7
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_957
-; CHECK-RV32-NEXT:    j .LBB61_468
-; CHECK-RV32-NEXT:  .LBB61_957: # %cond.load1757
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 441
-; CHECK-RV32-NEXT:    li a4, 440
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 6
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_958
-; CHECK-RV32-NEXT:    j .LBB61_469
-; CHECK-RV32-NEXT:  .LBB61_958: # %cond.load1761
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 442
-; CHECK-RV32-NEXT:    li a4, 441
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 5
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_959
-; CHECK-RV32-NEXT:    j .LBB61_470
-; CHECK-RV32-NEXT:  .LBB61_959: # %cond.load1765
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 443
-; CHECK-RV32-NEXT:    li a4, 442
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 4
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_960
-; CHECK-RV32-NEXT:    j .LBB61_471
-; CHECK-RV32-NEXT:  .LBB61_960: # %cond.load1769
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 444
-; CHECK-RV32-NEXT:    li a4, 443
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 3
-; CHECK-RV32-NEXT:    bltz a3, .LBB61_961
-; CHECK-RV32-NEXT:    j .LBB61_472
-; CHECK-RV32-NEXT:  .LBB61_961: # %cond.load1773
-; CHECK-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-RV32-NEXT:    li a3, 445
-; CHECK-RV32-NEXT:    li a4, 444
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a3, a2, 2
-; CHECK-RV32-NEXT:    bgez a3, .LBB61_1038
-; CHECK-RV32-NEXT:    j .LBB61_473
-; CHECK-RV32-NEXT:  .LBB61_1038: # %cond.load1773
-; CHECK-RV32-NEXT:    j .LBB61_474
-; CHECK-RV32-NEXT:  .LBB61_962: # %cond.load1785
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 448
-; CHECK-RV32-NEXT:    li a4, 447
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_963
-; CHECK-RV32-NEXT:    j .LBB61_478
-; CHECK-RV32-NEXT:  .LBB61_963: # %cond.load1789
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 449
-; CHECK-RV32-NEXT:    li a4, 448
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_964
-; CHECK-RV32-NEXT:    j .LBB61_479
-; CHECK-RV32-NEXT:  .LBB61_964: # %cond.load1793
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 450
-; CHECK-RV32-NEXT:    li a4, 449
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_965
-; CHECK-RV32-NEXT:    j .LBB61_480
-; CHECK-RV32-NEXT:  .LBB61_965: # %cond.load1797
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 451
-; CHECK-RV32-NEXT:    li a4, 450
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_966
-; CHECK-RV32-NEXT:    j .LBB61_481
-; CHECK-RV32-NEXT:  .LBB61_966: # %cond.load1801
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 452
-; CHECK-RV32-NEXT:    li a4, 451
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_967
-; CHECK-RV32-NEXT:    j .LBB61_482
-; CHECK-RV32-NEXT:  .LBB61_967: # %cond.load1805
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 453
-; CHECK-RV32-NEXT:    li a4, 452
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_968
-; CHECK-RV32-NEXT:    j .LBB61_483
-; CHECK-RV32-NEXT:  .LBB61_968: # %cond.load1809
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 454
-; CHECK-RV32-NEXT:    li a4, 453
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_969
-; CHECK-RV32-NEXT:    j .LBB61_484
-; CHECK-RV32-NEXT:  .LBB61_969: # %cond.load1813
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 455
-; CHECK-RV32-NEXT:    li a4, 454
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_970
-; CHECK-RV32-NEXT:    j .LBB61_485
-; CHECK-RV32-NEXT:  .LBB61_970: # %cond.load1817
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 456
-; CHECK-RV32-NEXT:    li a4, 455
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_971
-; CHECK-RV32-NEXT:    j .LBB61_486
-; CHECK-RV32-NEXT:  .LBB61_971: # %cond.load1821
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 457
-; CHECK-RV32-NEXT:    li a4, 456
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_972
-; CHECK-RV32-NEXT:    j .LBB61_487
-; CHECK-RV32-NEXT:  .LBB61_972: # %cond.load1825
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 458
-; CHECK-RV32-NEXT:    li a4, 457
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_973
-; CHECK-RV32-NEXT:    j .LBB61_488
-; CHECK-RV32-NEXT:  .LBB61_973: # %cond.load1829
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 459
-; CHECK-RV32-NEXT:    li a4, 458
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_974
-; CHECK-RV32-NEXT:    j .LBB61_489
-; CHECK-RV32-NEXT:  .LBB61_974: # %cond.load1833
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 460
-; CHECK-RV32-NEXT:    li a4, 459
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_975
-; CHECK-RV32-NEXT:    j .LBB61_490
-; CHECK-RV32-NEXT:  .LBB61_975: # %cond.load1837
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 461
-; CHECK-RV32-NEXT:    li a4, 460
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_976
-; CHECK-RV32-NEXT:    j .LBB61_491
-; CHECK-RV32-NEXT:  .LBB61_976: # %cond.load1841
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 462
-; CHECK-RV32-NEXT:    li a4, 461
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_977
-; CHECK-RV32-NEXT:    j .LBB61_492
-; CHECK-RV32-NEXT:  .LBB61_977: # %cond.load1845
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 463
-; CHECK-RV32-NEXT:    li a4, 462
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_978
-; CHECK-RV32-NEXT:    j .LBB61_493
-; CHECK-RV32-NEXT:  .LBB61_978: # %cond.load1849
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 464
-; CHECK-RV32-NEXT:    li a4, 463
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_979
-; CHECK-RV32-NEXT:    j .LBB61_494
-; CHECK-RV32-NEXT:  .LBB61_979: # %cond.load1853
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 465
-; CHECK-RV32-NEXT:    li a4, 464
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_980
-; CHECK-RV32-NEXT:    j .LBB61_495
-; CHECK-RV32-NEXT:  .LBB61_980: # %cond.load1857
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 466
-; CHECK-RV32-NEXT:    li a4, 465
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_981
-; CHECK-RV32-NEXT:    j .LBB61_496
-; CHECK-RV32-NEXT:  .LBB61_981: # %cond.load1861
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 467
-; CHECK-RV32-NEXT:    li a4, 466
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_982
-; CHECK-RV32-NEXT:    j .LBB61_497
-; CHECK-RV32-NEXT:  .LBB61_982: # %cond.load1865
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 468
-; CHECK-RV32-NEXT:    li a4, 467
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_983
-; CHECK-RV32-NEXT:    j .LBB61_498
-; CHECK-RV32-NEXT:  .LBB61_983: # %cond.load1869
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 469
-; CHECK-RV32-NEXT:    li a4, 468
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_984
-; CHECK-RV32-NEXT:    j .LBB61_499
-; CHECK-RV32-NEXT:  .LBB61_984: # %cond.load1873
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 470
-; CHECK-RV32-NEXT:    li a4, 469
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_985
-; CHECK-RV32-NEXT:    j .LBB61_500
-; CHECK-RV32-NEXT:  .LBB61_985: # %cond.load1877
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 471
-; CHECK-RV32-NEXT:    li a4, 470
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_986
-; CHECK-RV32-NEXT:    j .LBB61_501
-; CHECK-RV32-NEXT:  .LBB61_986: # %cond.load1881
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 472
-; CHECK-RV32-NEXT:    li a4, 471
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_987
-; CHECK-RV32-NEXT:    j .LBB61_502
-; CHECK-RV32-NEXT:  .LBB61_987: # %cond.load1885
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 473
-; CHECK-RV32-NEXT:    li a4, 472
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_988
-; CHECK-RV32-NEXT:    j .LBB61_503
-; CHECK-RV32-NEXT:  .LBB61_988: # %cond.load1889
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 474
-; CHECK-RV32-NEXT:    li a4, 473
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_989
-; CHECK-RV32-NEXT:    j .LBB61_504
-; CHECK-RV32-NEXT:  .LBB61_989: # %cond.load1893
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 475
-; CHECK-RV32-NEXT:    li a4, 474
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_990
-; CHECK-RV32-NEXT:    j .LBB61_505
-; CHECK-RV32-NEXT:  .LBB61_990: # %cond.load1897
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 476
-; CHECK-RV32-NEXT:    li a4, 475
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_991
-; CHECK-RV32-NEXT:    j .LBB61_506
-; CHECK-RV32-NEXT:  .LBB61_991: # %cond.load1901
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a4, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-RV32-NEXT:    li a2, 477
-; CHECK-RV32-NEXT:    li a4, 476
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a3, 2
-; CHECK-RV32-NEXT:    bgez a2, .LBB61_1039
-; CHECK-RV32-NEXT:    j .LBB61_507
-; CHECK-RV32-NEXT:  .LBB61_1039: # %cond.load1901
-; CHECK-RV32-NEXT:    j .LBB61_508
-; CHECK-RV32-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 480
-; CHECK-RV32-NEXT:    li a3, 479
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 1
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_993
-; CHECK-RV32-NEXT:    j .LBB61_512
-; CHECK-RV32-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 481
-; CHECK-RV32-NEXT:    li a3, 480
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 2
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_994
-; CHECK-RV32-NEXT:    j .LBB61_513
-; CHECK-RV32-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 482
-; CHECK-RV32-NEXT:    li a3, 481
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 4
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_995
-; CHECK-RV32-NEXT:    j .LBB61_514
-; CHECK-RV32-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 483
-; CHECK-RV32-NEXT:    li a3, 482
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 8
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_996
-; CHECK-RV32-NEXT:    j .LBB61_515
-; CHECK-RV32-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 484
-; CHECK-RV32-NEXT:    li a3, 483
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 16
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_997
-; CHECK-RV32-NEXT:    j .LBB61_516
-; CHECK-RV32-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 485
-; CHECK-RV32-NEXT:    li a3, 484
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 32
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_998
-; CHECK-RV32-NEXT:    j .LBB61_517
-; CHECK-RV32-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 486
-; CHECK-RV32-NEXT:    li a3, 485
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 64
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_999
-; CHECK-RV32-NEXT:    j .LBB61_518
-; CHECK-RV32-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 487
-; CHECK-RV32-NEXT:    li a3, 486
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 128
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_1000
-; CHECK-RV32-NEXT:    j .LBB61_519
-; CHECK-RV32-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 488
-; CHECK-RV32-NEXT:    li a3, 487
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 256
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_1001
-; CHECK-RV32-NEXT:    j .LBB61_520
-; CHECK-RV32-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 489
-; CHECK-RV32-NEXT:    li a3, 488
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 512
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_1002
-; CHECK-RV32-NEXT:    j .LBB61_521
-; CHECK-RV32-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 490
-; CHECK-RV32-NEXT:    li a3, 489
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-RV32-NEXT:    bnez a2, .LBB61_1003
-; CHECK-RV32-NEXT:    j .LBB61_522
-; CHECK-RV32-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 491
-; CHECK-RV32-NEXT:    li a3, 490
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 20
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1004
-; CHECK-RV32-NEXT:    j .LBB61_523
-; CHECK-RV32-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 492
-; CHECK-RV32-NEXT:    li a3, 491
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 19
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1005
-; CHECK-RV32-NEXT:    j .LBB61_524
-; CHECK-RV32-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 493
-; CHECK-RV32-NEXT:    li a3, 492
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 18
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1006
-; CHECK-RV32-NEXT:    j .LBB61_525
-; CHECK-RV32-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 494
-; CHECK-RV32-NEXT:    li a3, 493
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 17
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1007
-; CHECK-RV32-NEXT:    j .LBB61_526
-; CHECK-RV32-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 495
-; CHECK-RV32-NEXT:    li a3, 494
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 16
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1008
-; CHECK-RV32-NEXT:    j .LBB61_527
-; CHECK-RV32-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 496
-; CHECK-RV32-NEXT:    li a3, 495
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 15
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1009
-; CHECK-RV32-NEXT:    j .LBB61_528
-; CHECK-RV32-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 497
-; CHECK-RV32-NEXT:    li a3, 496
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 14
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1010
-; CHECK-RV32-NEXT:    j .LBB61_529
-; CHECK-RV32-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 498
-; CHECK-RV32-NEXT:    li a3, 497
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 13
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1011
-; CHECK-RV32-NEXT:    j .LBB61_530
-; CHECK-RV32-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 499
-; CHECK-RV32-NEXT:    li a3, 498
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 12
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1012
-; CHECK-RV32-NEXT:    j .LBB61_531
-; CHECK-RV32-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 500
-; CHECK-RV32-NEXT:    li a3, 499
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 11
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1013
-; CHECK-RV32-NEXT:    j .LBB61_532
-; CHECK-RV32-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 501
-; CHECK-RV32-NEXT:    li a3, 500
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 10
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1014
-; CHECK-RV32-NEXT:    j .LBB61_533
-; CHECK-RV32-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 502
-; CHECK-RV32-NEXT:    li a3, 501
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 9
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1015
-; CHECK-RV32-NEXT:    j .LBB61_534
-; CHECK-RV32-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 503
-; CHECK-RV32-NEXT:    li a3, 502
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 8
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1016
-; CHECK-RV32-NEXT:    j .LBB61_535
-; CHECK-RV32-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 504
-; CHECK-RV32-NEXT:    li a3, 503
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 7
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1017
-; CHECK-RV32-NEXT:    j .LBB61_536
-; CHECK-RV32-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 505
-; CHECK-RV32-NEXT:    li a3, 504
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 6
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1018
-; CHECK-RV32-NEXT:    j .LBB61_537
-; CHECK-RV32-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 506
-; CHECK-RV32-NEXT:    li a3, 505
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 5
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1019
-; CHECK-RV32-NEXT:    j .LBB61_538
-; CHECK-RV32-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 507
-; CHECK-RV32-NEXT:    li a3, 506
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 4
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1020
-; CHECK-RV32-NEXT:    j .LBB61_539
-; CHECK-RV32-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 508
-; CHECK-RV32-NEXT:    li a3, 507
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 3
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1021
-; CHECK-RV32-NEXT:    j .LBB61_540
-; CHECK-RV32-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 509
-; CHECK-RV32-NEXT:    li a3, 508
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 2
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1022
-; CHECK-RV32-NEXT:    j .LBB61_541
-; CHECK-RV32-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 510
-; CHECK-RV32-NEXT:    li a3, 509
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    slli a2, a1, 1
-; CHECK-RV32-NEXT:    bltz a2, .LBB61_1023
-; CHECK-RV32-NEXT:    j .LBB61_542
-; CHECK-RV32-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-RV32-NEXT:    li a3, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-RV32-NEXT:    li a2, 511
-; CHECK-RV32-NEXT:    li a3, 510
-; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV32-NEXT:    addi a0, a0, 1
-; CHECK-RV32-NEXT:    bltz a1, .LBB61_1024
-; CHECK-RV32-NEXT:    j .LBB61_543
-; CHECK-RV32-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-RV32-NEXT:    lbu a0, 0(a0)
-; CHECK-RV32-NEXT:    li a1, 512
-; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV32-NEXT:    vmv.s.x v16, a0
-; CHECK-RV32-NEXT:    li a0, 511
-; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-VRGATHER-RV32:       # %bb.0:
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_1
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_544
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1: # %else
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_2
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_545
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_2: # %else2
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_3
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_546
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_3: # %else6
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_4
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_547
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_4: # %else10
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_5
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_548
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_5: # %else14
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_6
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_549
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_6: # %else18
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_7
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_550
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_7: # %else22
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_8
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_551
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_8: # %else26
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_9
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_552
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_9: # %else30
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_10
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_553
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_10: # %else34
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_11
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_554
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_11: # %else38
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_12
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_555
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_12: # %else42
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_13
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_556
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_13: # %else46
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_14
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_557
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_14: # %else50
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_15
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_558
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_15: # %else54
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_16
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_559
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_16: # %else58
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_17
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_560
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_17: # %else62
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_18
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_561
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_18: # %else66
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_19
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_562
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_19: # %else70
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_20
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_563
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_20: # %else74
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_21
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_564
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_21: # %else78
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_22
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_565
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_22: # %else82
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_23
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_566
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_23: # %else86
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_24
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_567
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_24: # %else90
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_25
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_568
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_25: # %else94
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_26
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_569
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_26: # %else98
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_27
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_570
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_27: # %else102
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_28
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_571
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_28: # %else106
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_30
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_29: # %cond.load109
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_30: # %else110
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_32
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.31: # %cond.load113
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_32: # %else114
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v0, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_34
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.33: # %cond.load117
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v17, 30
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_34: # %else118
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_35
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_572
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_35: # %else122
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_36
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_573
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_36: # %else126
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_37
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_574
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_37: # %else130
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_38
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_575
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_38: # %else134
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_39
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_576
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_39: # %else138
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_40
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_577
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_40: # %else142
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_41
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_578
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_41: # %else146
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_42
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_579
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_42: # %else150
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_43
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_580
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_43: # %else154
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_44
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_581
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_44: # %else158
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_45
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_582
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_45: # %else162
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_46
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_583
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_46: # %else166
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_47
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_584
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_47: # %else170
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_48
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_585
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_48: # %else174
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_49
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_586
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_49: # %else178
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_50
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_587
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_50: # %else182
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_51
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_588
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_51: # %else186
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_52
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_589
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_52: # %else190
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_53
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_590
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_53: # %else194
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_54
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_591
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_54: # %else198
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_55
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_592
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_55: # %else202
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_56
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_593
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_56: # %else206
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_57
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_594
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_57: # %else210
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_58
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_595
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_58: # %else214
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_59
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_596
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_59: # %else218
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_60
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_597
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_60: # %else222
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_61
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_598
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_61: # %else226
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_62
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_599
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_62: # %else230
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_63
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_600
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_63: # %else234
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_64
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_601
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_64: # %else238
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_66
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_65: # %cond.load241
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 62
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 61
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_66: # %else242
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_68
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.67: # %cond.load245
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 63
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 62
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_68: # %else246
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_69
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_602
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_69: # %else250
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_70
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_603
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_70: # %else254
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_71
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_604
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_71: # %else258
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_72
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_605
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_72: # %else262
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_73
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_606
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_73: # %else266
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_74
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_607
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_74: # %else270
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_75
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_608
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_75: # %else274
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_76
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_609
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_76: # %else278
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_77
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_610
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_77: # %else282
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_78
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_611
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_78: # %else286
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_79
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_612
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_79: # %else290
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_80
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_613
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_80: # %else294
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_81
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_614
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_81: # %else298
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_82
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_615
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_82: # %else302
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_83
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_616
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_83: # %else306
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_84
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_617
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_84: # %else310
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_85
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_618
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_85: # %else314
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_86
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_619
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_86: # %else318
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_87
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_620
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_87: # %else322
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_88
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_621
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_88: # %else326
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_89
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_622
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_89: # %else330
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_90
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_623
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_90: # %else334
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_91
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_624
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_91: # %else338
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_92
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_625
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_92: # %else342
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_93
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_626
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_93: # %else346
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_94
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_627
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_94: # %else350
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_95
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_628
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_95: # %else354
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_96
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_629
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_96: # %else358
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_97
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_630
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_97: # %else362
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_98
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_631
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_98: # %else366
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_100
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_99: # %cond.load369
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 94
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 93
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_100: # %else370
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_102
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.101: # %cond.load373
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 95
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 94
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_102: # %else374
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_103
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_632
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_103: # %else378
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_104
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_633
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_104: # %else382
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_105
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_634
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_105: # %else386
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_106
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_635
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_106: # %else390
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_107
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_636
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_107: # %else394
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_108
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_637
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_108: # %else398
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_109
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_638
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_109: # %else402
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_110
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_639
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_110: # %else406
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_111
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_640
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_111: # %else410
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_112
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_641
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_112: # %else414
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_113
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_642
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_113: # %else418
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_114
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_643
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_114: # %else422
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_115
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_644
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_115: # %else426
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_116
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_645
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_116: # %else430
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_117
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_646
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_117: # %else434
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_118
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_647
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_118: # %else438
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_119
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_648
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_119: # %else442
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_120
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_649
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_120: # %else446
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_121
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_650
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_121: # %else450
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_122
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_651
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_122: # %else454
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_123
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_652
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_123: # %else458
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_124
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_653
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_124: # %else462
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_125
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_654
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_125: # %else466
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_126
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_655
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_126: # %else470
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_127
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_656
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_127: # %else474
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_128
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_657
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_128: # %else478
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_129
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_658
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_129: # %else482
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_130
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_659
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_130: # %else486
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_131
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_660
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_131: # %else490
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_132
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_661
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_132: # %else494
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_134
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_133: # %cond.load497
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 126
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 125
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_134: # %else498
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_136
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.135: # %cond.load501
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 127
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 126
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_136: # %else502
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_137
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_662
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_137: # %else506
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_138
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_663
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_138: # %else510
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_139
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_664
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_139: # %else514
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_140
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_665
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_140: # %else518
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_141
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_666
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_141: # %else522
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_142
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_667
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_142: # %else526
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_143
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_668
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_143: # %else530
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_144
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_669
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_144: # %else534
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_145
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_670
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_145: # %else538
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_146
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_671
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_146: # %else542
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_147
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_672
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_147: # %else546
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_148
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_673
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_148: # %else550
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_149
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_674
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_149: # %else554
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_150
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_675
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_150: # %else558
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_151
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_676
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_151: # %else562
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_152
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_677
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_152: # %else566
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_153
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_678
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_153: # %else570
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_154
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_679
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_154: # %else574
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_155
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_680
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_155: # %else578
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_156
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_681
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_156: # %else582
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_157
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_682
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_157: # %else586
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_158
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_683
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_158: # %else590
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_159
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_684
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_159: # %else594
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_160
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_685
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_160: # %else598
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_161
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_686
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_161: # %else602
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_162
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_687
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_162: # %else606
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_163
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_688
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_163: # %else610
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_164
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_689
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_164: # %else614
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_165
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_690
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_165: # %else618
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_166
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_691
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_166: # %else622
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_168
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_167: # %cond.load625
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 158
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 157
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_168: # %else626
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_170
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.169: # %cond.load629
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 159
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 158
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_170: # %else630
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_171
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_692
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_171: # %else634
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_172
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_693
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_172: # %else638
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_173
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_694
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_173: # %else642
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_174
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_695
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_174: # %else646
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_175
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_696
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_175: # %else650
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_176
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_697
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_176: # %else654
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_177
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_698
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_177: # %else658
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_178
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_699
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_178: # %else662
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_179
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_700
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_179: # %else666
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_180
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_701
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_180: # %else670
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_181
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_702
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_181: # %else674
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_182
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_703
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_182: # %else678
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_183
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_704
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_183: # %else682
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_184
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_705
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_184: # %else686
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_185
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_706
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_185: # %else690
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_186
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_707
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_186: # %else694
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_187
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_708
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_187: # %else698
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_188
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_709
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_188: # %else702
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_189
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_710
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_189: # %else706
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_190
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_711
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_190: # %else710
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_191
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_712
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_191: # %else714
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_192
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_713
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_192: # %else718
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_193
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_714
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_193: # %else722
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_194
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_715
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_194: # %else726
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_195
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_716
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_195: # %else730
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_196
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_717
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_196: # %else734
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_197
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_718
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_197: # %else738
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_198
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_719
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_198: # %else742
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_199
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_720
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_199: # %else746
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_200
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_721
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_200: # %else750
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_202
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_201: # %cond.load753
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 190
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 189
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_202: # %else754
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_204
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.203: # %cond.load757
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 191
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 190
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_204: # %else758
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_205
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_722
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_205: # %else762
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_206
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_723
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_206: # %else766
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_207
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_724
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_207: # %else770
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_208
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_725
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_208: # %else774
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_209
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_726
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_209: # %else778
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_210
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_727
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_210: # %else782
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_211
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_728
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_211: # %else786
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_212
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_729
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_212: # %else790
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_213
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_730
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_213: # %else794
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_214
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_731
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_214: # %else798
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_215
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_732
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_215: # %else802
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_216
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_733
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_216: # %else806
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_217
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_734
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_217: # %else810
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_218
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_735
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_218: # %else814
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_219
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_736
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_219: # %else818
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_220
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_737
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_220: # %else822
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_221
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_738
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_221: # %else826
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_222
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_739
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_222: # %else830
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_223
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_740
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_223: # %else834
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_224
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_741
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_224: # %else838
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_225
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_742
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_225: # %else842
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_226
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_743
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_226: # %else846
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_227
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_744
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_227: # %else850
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_228
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_745
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_228: # %else854
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_229
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_746
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_229: # %else858
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_230
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_747
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_230: # %else862
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_231
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_748
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_231: # %else866
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_232
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_749
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_232: # %else870
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_233
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_750
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_233: # %else874
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_234
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_751
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_234: # %else878
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_236
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_235: # %cond.load881
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 222
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 221
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_236: # %else882
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_238
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.237: # %cond.load885
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 223
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 222
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_238: # %else886
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_239
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_752
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_239: # %else890
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_240
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_753
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_240: # %else894
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_241
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_754
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_241: # %else898
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_242
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_755
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_242: # %else902
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_243
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_756
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_243: # %else906
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_244
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_757
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_244: # %else910
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_245
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_758
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_245: # %else914
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_246
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_759
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_246: # %else918
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_247
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_760
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_247: # %else922
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_248
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_761
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_248: # %else926
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_249
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_762
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_249: # %else930
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_250
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_763
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_250: # %else934
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_251
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_764
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_251: # %else938
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_252
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_765
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_252: # %else942
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_253
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_766
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_253: # %else946
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_254
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_767
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_254: # %else950
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_255
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_768
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_255: # %else954
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_256
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_769
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_256: # %else958
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_257
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_770
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_257: # %else962
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_258
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_771
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_258: # %else966
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_259
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_772
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_259: # %else970
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_260
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_773
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_260: # %else974
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_261
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_774
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_261: # %else978
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_262
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_775
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_262: # %else982
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_263
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_776
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_263: # %else986
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_264
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_777
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_264: # %else990
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_265
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_778
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_265: # %else994
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_266
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_779
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_266: # %else998
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_267
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_780
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_267: # %else1002
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_268
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_781
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_268: # %else1006
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_270
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_269: # %cond.load1009
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 254
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 253
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_270: # %else1010
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_272
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.271: # %cond.load1013
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 255
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 254
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_272: # %else1014
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_273
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_782
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_273: # %else1018
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_274
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_783
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_274: # %else1022
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_275
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_784
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_275: # %else1026
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_276
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_785
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_276: # %else1030
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_277
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_786
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_277: # %else1034
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_278
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_787
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_278: # %else1038
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_279
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_788
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_279: # %else1042
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_280
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_789
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_280: # %else1046
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_281
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_790
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_281: # %else1050
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_282
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_791
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_282: # %else1054
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_283
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_792
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_283: # %else1058
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_284
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_793
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_284: # %else1062
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_285
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_794
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_285: # %else1066
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_286
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_795
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_286: # %else1070
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_287
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_796
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_287: # %else1074
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_288
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_797
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_288: # %else1078
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_289
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_798
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_289: # %else1082
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_290
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_799
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_290: # %else1086
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_291
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_800
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_291: # %else1090
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_292
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_801
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_292: # %else1094
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_293
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_802
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_293: # %else1098
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_294
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_803
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_294: # %else1102
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_295
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_804
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_295: # %else1106
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_296
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_805
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_296: # %else1110
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_297
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_806
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_297: # %else1114
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_298
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_807
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_298: # %else1118
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_299
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_808
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_299: # %else1122
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_300
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_809
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_300: # %else1126
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_301
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_810
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_301: # %else1130
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_302
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_811
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_302: # %else1134
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_304
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_303: # %cond.load1137
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 286
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 285
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_304: # %else1138
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_306
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.305: # %cond.load1141
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 287
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 286
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_306: # %else1142
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_307
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_812
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_307: # %else1146
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_308
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_813
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_308: # %else1150
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_309
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_814
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_309: # %else1154
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_310
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_815
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_310: # %else1158
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_311
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_816
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_311: # %else1162
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_312
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_817
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_312: # %else1166
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_313
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_818
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_313: # %else1170
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_314
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_819
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_314: # %else1174
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_315
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_820
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_315: # %else1178
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_316
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_821
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_316: # %else1182
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_317
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_822
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_317: # %else1186
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_318
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_823
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_318: # %else1190
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_319
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_824
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_319: # %else1194
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_320
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_825
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_320: # %else1198
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_321
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_826
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_321: # %else1202
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_322
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_827
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_322: # %else1206
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_323
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_828
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_323: # %else1210
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_324
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_829
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_324: # %else1214
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_325
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_830
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_325: # %else1218
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_326
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_831
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_326: # %else1222
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_327
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_832
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_327: # %else1226
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_328
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_833
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_328: # %else1230
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_329
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_834
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_329: # %else1234
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_330
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_835
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_330: # %else1238
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_331
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_836
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_331: # %else1242
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_332
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_837
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_332: # %else1246
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_333
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_838
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_333: # %else1250
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_334
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_839
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_334: # %else1254
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_335
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_840
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_335: # %else1258
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_336
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_841
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_336: # %else1262
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_338
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_337: # %cond.load1265
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 318
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 317
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_338: # %else1266
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_340
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.339: # %cond.load1269
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 319
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 318
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_340: # %else1270
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_341
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_842
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_341: # %else1274
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_342
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_843
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_342: # %else1278
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_343
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_844
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_343: # %else1282
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_344
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_845
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_344: # %else1286
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_345
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_846
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_345: # %else1290
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_346
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_847
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_346: # %else1294
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_347
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_848
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_347: # %else1298
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_348
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_849
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_348: # %else1302
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_349
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_850
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_349: # %else1306
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_350
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_851
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_350: # %else1310
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_351
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_852
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_351: # %else1314
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_352
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_853
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_352: # %else1318
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_353
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_854
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_353: # %else1322
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_354
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_855
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_354: # %else1326
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_355
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_856
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_355: # %else1330
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_356
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_857
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_356: # %else1334
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_357
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_858
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_357: # %else1338
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_358
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_859
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_358: # %else1342
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_359
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_860
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_359: # %else1346
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_360
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_861
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_360: # %else1350
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_361
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_862
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_361: # %else1354
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_362
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_863
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_362: # %else1358
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_363
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_864
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_363: # %else1362
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_364
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_865
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_364: # %else1366
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_365
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_866
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_365: # %else1370
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_366
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_867
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_366: # %else1374
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_367
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_868
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_367: # %else1378
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_368
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_869
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_368: # %else1382
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_369
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_870
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_369: # %else1386
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_370
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_871
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_370: # %else1390
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_372
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_371: # %cond.load1393
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 350
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 349
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_372: # %else1394
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_374
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.373: # %cond.load1397
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 351
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 350
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_374: # %else1398
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_375
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_872
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_375: # %else1402
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_376
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_873
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_376: # %else1406
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_377
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_874
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_377: # %else1410
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_378
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_875
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_378: # %else1414
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_379
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_876
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_379: # %else1418
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_380
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_877
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_380: # %else1422
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_381
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_878
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_381: # %else1426
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_382
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_879
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_382: # %else1430
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_383
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_880
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_383: # %else1434
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_384
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_881
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_384: # %else1438
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_385
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_882
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_385: # %else1442
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_386
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_883
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_386: # %else1446
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_387
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_884
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_387: # %else1450
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_388
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_885
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_388: # %else1454
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_389
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_886
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_389: # %else1458
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_390
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_887
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_390: # %else1462
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_391
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_888
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_391: # %else1466
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_392
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_889
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_392: # %else1470
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_393
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_890
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_393: # %else1474
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_394
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_891
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_394: # %else1478
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_395
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_892
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_395: # %else1482
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_396
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_893
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_396: # %else1486
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_397
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_894
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_397: # %else1490
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_398
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_895
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_398: # %else1494
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_399
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_896
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_399: # %else1498
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_400
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_897
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_400: # %else1502
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_401
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_898
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_401: # %else1506
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_402
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_899
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_402: # %else1510
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_403
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_900
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_403: # %else1514
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_404
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_901
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_404: # %else1518
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_406
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_405: # %cond.load1521
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 382
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 381
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_406: # %else1522
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_408
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.407: # %cond.load1525
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 383
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 382
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_408: # %else1526
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_409
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_902
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_409: # %else1530
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_410
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_903
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_410: # %else1534
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_411
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_904
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_411: # %else1538
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_412
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_905
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_412: # %else1542
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_413
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_906
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_413: # %else1546
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_414
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_907
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_414: # %else1550
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_415
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_908
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_415: # %else1554
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_416
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_909
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_416: # %else1558
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_417
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_910
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_417: # %else1562
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_418
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_911
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_418: # %else1566
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_419
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_912
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_419: # %else1570
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_420
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_913
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_420: # %else1574
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_421
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_914
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_421: # %else1578
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_422
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_915
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_422: # %else1582
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_423
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_916
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_423: # %else1586
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_424
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_917
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_424: # %else1590
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_425
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_918
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_425: # %else1594
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_426
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_919
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_426: # %else1598
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_427
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_920
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_427: # %else1602
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_428
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_921
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_428: # %else1606
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_429
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_922
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_429: # %else1610
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_430
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_923
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_430: # %else1614
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_431
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_924
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_431: # %else1618
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_432
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_925
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_432: # %else1622
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_433
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_926
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_433: # %else1626
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_434
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_927
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_434: # %else1630
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_435
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_928
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_435: # %else1634
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_436
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_929
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_436: # %else1638
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_437
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_930
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_437: # %else1642
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_438
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_931
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_438: # %else1646
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_440
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_439: # %cond.load1649
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 414
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 413
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_440: # %else1650
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_442
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.441: # %cond.load1653
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 415
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 414
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_442: # %else1654
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_443
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_932
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_443: # %else1658
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_444
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_933
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_444: # %else1662
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_445
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_934
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_445: # %else1666
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_446
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_935
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_446: # %else1670
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_447
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_936
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_447: # %else1674
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_448
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_937
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_448: # %else1678
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_449
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_938
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_449: # %else1682
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_450
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_939
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_450: # %else1686
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_451
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_940
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_451: # %else1690
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_452
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_941
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_452: # %else1694
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_453
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_942
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_453: # %else1698
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_454
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_943
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_454: # %else1702
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_455
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_944
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_455: # %else1706
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_456
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_945
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_456: # %else1710
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_457
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_946
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_457: # %else1714
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_458
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_947
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_458: # %else1718
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_459
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_948
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_459: # %else1722
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_460
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_949
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_460: # %else1726
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_461
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_950
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_461: # %else1730
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_462
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_951
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_462: # %else1734
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_463
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_952
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_463: # %else1738
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_464
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_953
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_464: # %else1742
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_465
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_954
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_465: # %else1746
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_466
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_955
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_466: # %else1750
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_467
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_956
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_467: # %else1754
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_468
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_957
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_468: # %else1758
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_469
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_958
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_469: # %else1762
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_470
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_959
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_470: # %else1766
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_471
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_960
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_471: # %else1770
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_472
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_961
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_472: # %else1774
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_474
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_473: # %cond.load1777
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 446
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 445
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_474: # %else1778
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_476
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.475: # %cond.load1781
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 447
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 446
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_476: # %else1782
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_477
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_962
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_477: # %else1786
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_478
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_963
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_478: # %else1790
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_479
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_964
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_479: # %else1794
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_480
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_965
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_480: # %else1798
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_481
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_966
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_481: # %else1802
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_482
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_967
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_482: # %else1806
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_483
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_968
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_483: # %else1810
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_484
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_969
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_484: # %else1814
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_485
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_970
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_485: # %else1818
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_486
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_971
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_486: # %else1822
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_487
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_972
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_487: # %else1826
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_488
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_973
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_488: # %else1830
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_489
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_974
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_489: # %else1834
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_490
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_975
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_490: # %else1838
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_491
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_976
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_491: # %else1842
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_492
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_977
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_492: # %else1846
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_493
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_978
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_493: # %else1850
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_494
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_979
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_494: # %else1854
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_495
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_980
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_495: # %else1858
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_496
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_981
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_496: # %else1862
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_497
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_982
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_497: # %else1866
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_498
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_983
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_498: # %else1870
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_499
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_984
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_499: # %else1874
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_500
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_985
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_500: # %else1878
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_501
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_986
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_501: # %else1882
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_502
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_987
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_502: # %else1886
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_503
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_988
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_503: # %else1890
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_504
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_989
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_504: # %else1894
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_505
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_990
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_505: # %else1898
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_506
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_991
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_506: # %else1902
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_508
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_507: # %cond.load1905
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 478
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 477
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_508: # %else1906
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_510
+; CHECK-VRGATHER-RV32-NEXT:  # %bb.509: # %cond.load1909
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 479
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 478
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_510: # %else1910
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_511
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_992
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_511: # %else1914
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_512
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_993
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_512: # %else1918
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_513
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_994
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_513: # %else1922
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_514
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_995
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_514: # %else1926
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_515
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_996
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_515: # %else1930
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_516
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_997
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_516: # %else1934
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_517
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_998
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_517: # %else1938
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_518
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_999
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_518: # %else1942
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_519
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1000
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_519: # %else1946
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_520
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1001
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_520: # %else1950
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_521
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1002
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_521: # %else1954
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_522
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1003
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_522: # %else1958
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_523
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1004
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_523: # %else1962
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_524
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1005
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_524: # %else1966
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_525
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1006
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_525: # %else1970
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_526
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1007
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_526: # %else1974
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_527
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1008
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_527: # %else1978
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_528
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1009
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_528: # %else1982
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_529
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1010
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_529: # %else1986
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_530
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1011
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_530: # %else1990
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_531
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1012
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_531: # %else1994
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_532
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1013
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_532: # %else1998
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_533
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1014
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_533: # %else2002
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_534
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1015
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_534: # %else2006
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_535
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1016
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_535: # %else2010
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_536
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1017
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_536: # %else2014
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_537
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1018
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_537: # %else2018
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_538
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1019
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_538: # %else2022
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_539
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1020
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_539: # %else2026
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_540
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1021
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_540: # %else2030
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_541
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1022
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_541: # %else2034
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_542
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1023
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_542: # %else2038
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_543
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1024
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_543: # %else2042
+; CHECK-VRGATHER-RV32-NEXT:    ret
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_544: # %cond.load
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v8, a1
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_545
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_2
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_545: # %cond.load1
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_546
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_3
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_546: # %cond.load5
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_547
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_4
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_547: # %cond.load9
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_548
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_5
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_548: # %cond.load13
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_549
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_6
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_549: # %cond.load17
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_550
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_7
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_550: # %cond.load21
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_551
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_8
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_551: # %cond.load25
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_552
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_9
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_552: # %cond.load29
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_553
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_10
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_553: # %cond.load33
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_554
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_11
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_554: # %cond.load37
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_555
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_12
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_555: # %cond.load41
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_556
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_13
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_556: # %cond.load45
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_557
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_14
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_557: # %cond.load49
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_558
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_15
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_558: # %cond.load53
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_559
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_16
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_559: # %cond.load57
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_560
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_17
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_560: # %cond.load61
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_561
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_18
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_561: # %cond.load65
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_562
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_19
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_562: # %cond.load69
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_563
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_20
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_563: # %cond.load73
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_564
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_21
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_564: # %cond.load77
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_565
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_22
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_565: # %cond.load81
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_566
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_23
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_566: # %cond.load85
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_567
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_24
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_567: # %cond.load89
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_568
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_25
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_568: # %cond.load93
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_569
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_26
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_569: # %cond.load97
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_570
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_27
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_570: # %cond.load101
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_571
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_28
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_571: # %cond.load105
+; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_1025
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_29
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1025: # %cond.load105
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_30
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_572: # %cond.load121
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_573
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_36
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_573: # %cond.load125
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 33
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 32
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_574
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_37
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_574: # %cond.load129
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 34
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 33
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_575
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_38
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_575: # %cond.load133
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 35
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 34
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_576
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_39
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_576: # %cond.load137
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 36
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 35
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_577
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_40
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_577: # %cond.load141
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 37
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 36
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_578
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_41
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_578: # %cond.load145
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 38
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 37
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_579
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_42
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_579: # %cond.load149
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 39
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 38
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_580
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_43
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_580: # %cond.load153
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 40
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 39
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_581
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_44
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_581: # %cond.load157
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 41
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 40
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_582
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_45
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_582: # %cond.load161
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 42
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 41
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_583
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_46
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_583: # %cond.load165
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 43
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 42
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_584
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_47
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_584: # %cond.load169
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 44
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 43
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_585
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_48
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_585: # %cond.load173
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 45
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 44
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_586
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_49
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_586: # %cond.load177
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 46
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 45
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_587
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_50
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_587: # %cond.load181
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 47
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 46
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_588
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_51
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_588: # %cond.load185
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 48
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 47
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_589
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_52
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_589: # %cond.load189
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 49
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 48
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_590
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_53
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_590: # %cond.load193
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 50
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 49
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_591
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_54
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_591: # %cond.load197
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 51
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 50
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_592
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_55
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_592: # %cond.load201
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 52
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 51
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_593
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_56
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_593: # %cond.load205
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 53
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 52
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_594
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_57
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_594: # %cond.load209
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 54
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 53
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_595
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_58
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_595: # %cond.load213
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 55
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 54
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_596
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_59
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_596: # %cond.load217
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 56
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 55
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_597
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_60
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_597: # %cond.load221
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 57
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 56
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_598
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_61
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_598: # %cond.load225
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 58
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 57
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_599
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_62
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_599: # %cond.load229
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 59
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 58
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_600
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_63
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_600: # %cond.load233
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 60
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 59
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_601
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_64
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_601: # %cond.load237
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 61
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 60
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1026
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_65
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1026: # %cond.load237
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_66
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_602: # %cond.load249
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 63
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_603
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_70
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_603: # %cond.load253
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 65
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 64
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_604
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_71
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_604: # %cond.load257
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 66
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 65
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_605
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_72
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_605: # %cond.load261
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 67
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 66
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_606
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_73
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_606: # %cond.load265
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 68
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 67
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_607
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_74
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_607: # %cond.load269
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 69
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 68
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_608
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_75
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_608: # %cond.load273
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 70
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 69
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_609
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_76
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_609: # %cond.load277
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 71
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 70
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_610
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_77
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_610: # %cond.load281
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 72
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 71
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_611
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_78
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_611: # %cond.load285
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 73
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 72
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_612
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_79
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_612: # %cond.load289
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 74
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 73
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_613
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_80
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_613: # %cond.load293
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 75
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 74
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_614
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_81
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_614: # %cond.load297
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 76
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 75
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_615
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_82
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_615: # %cond.load301
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 77
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 76
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_616
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_83
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_616: # %cond.load305
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 78
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 77
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_617
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_84
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_617: # %cond.load309
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 79
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 78
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_618
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_85
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_618: # %cond.load313
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 80
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 79
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_619
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_86
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_619: # %cond.load317
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 81
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 80
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_620
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_87
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_620: # %cond.load321
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 82
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 81
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_621
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_88
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_621: # %cond.load325
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 83
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 82
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_622
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_89
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_622: # %cond.load329
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 84
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 83
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_623
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_90
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_623: # %cond.load333
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 85
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 84
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_624
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_91
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_624: # %cond.load337
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 86
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 85
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_625
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_92
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_625: # %cond.load341
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 87
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 86
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_626
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_93
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_626: # %cond.load345
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 88
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 87
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_627
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_94
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_627: # %cond.load349
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 89
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 88
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_628
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_95
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_628: # %cond.load353
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 90
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 89
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_629
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_96
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_629: # %cond.load357
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 91
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 90
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_630
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_97
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_630: # %cond.load361
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 92
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 91
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_631
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_98
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_631: # %cond.load365
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 93
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 92
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1027
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_99
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1027: # %cond.load365
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_100
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_632: # %cond.load377
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 96
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 95
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_633
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_104
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_633: # %cond.load381
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 97
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 96
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_634
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_105
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_634: # %cond.load385
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 98
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 97
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_635
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_106
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_635: # %cond.load389
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 99
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 98
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_636
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_107
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_636: # %cond.load393
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 100
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 99
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_637
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_108
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_637: # %cond.load397
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 101
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 100
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_638
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_109
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_638: # %cond.load401
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 102
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 101
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_639
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_110
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_639: # %cond.load405
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 103
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 102
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_640
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_111
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_640: # %cond.load409
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 104
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 103
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_641
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_112
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_641: # %cond.load413
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 105
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 104
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_642
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_113
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_642: # %cond.load417
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 106
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 105
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_643
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_114
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_643: # %cond.load421
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 107
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 106
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_644
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_115
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_644: # %cond.load425
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 108
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 107
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_645
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_116
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_645: # %cond.load429
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 109
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 108
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_646
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_117
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_646: # %cond.load433
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 110
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 109
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_647
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_118
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_647: # %cond.load437
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 111
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 110
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_648
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_119
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_648: # %cond.load441
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 112
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 111
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_649
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_120
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_649: # %cond.load445
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 113
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 112
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_650
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_121
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_650: # %cond.load449
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 114
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 113
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_651
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_122
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_651: # %cond.load453
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 115
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 114
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_652
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_123
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_652: # %cond.load457
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 116
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 115
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_653
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_124
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_653: # %cond.load461
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 117
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 116
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_654
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_125
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_654: # %cond.load465
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 118
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 117
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_655
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_126
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_655: # %cond.load469
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 119
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 118
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_656
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_127
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_656: # %cond.load473
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 120
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 119
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_657
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_128
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_657: # %cond.load477
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 121
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 120
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_658
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_129
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_658: # %cond.load481
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 122
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 121
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_659
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_130
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_659: # %cond.load485
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 123
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 122
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_660
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_131
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_660: # %cond.load489
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 124
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 123
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_661
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_132
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_661: # %cond.load493
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 125
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 124
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1028
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_133
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1028: # %cond.load493
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_134
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_662: # %cond.load505
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 127
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_663
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_138
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_663: # %cond.load509
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 129
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 128
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_664
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_139
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_664: # %cond.load513
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 130
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 129
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_665
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_140
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_665: # %cond.load517
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 131
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 130
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_666
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_141
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_666: # %cond.load521
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 132
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 131
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_667
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_142
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_667: # %cond.load525
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 133
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 132
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_668
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_143
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_668: # %cond.load529
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 134
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 133
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_669
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_144
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_669: # %cond.load533
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 135
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 134
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_670
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_145
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_670: # %cond.load537
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 136
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 135
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_671
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_146
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_671: # %cond.load541
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 137
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 136
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_672
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_147
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_672: # %cond.load545
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 138
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 137
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_673
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_148
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_673: # %cond.load549
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 139
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 138
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_674
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_149
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_674: # %cond.load553
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 140
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 139
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_675
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_150
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_675: # %cond.load557
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 141
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 140
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_676
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_151
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_676: # %cond.load561
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 142
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 141
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_677
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_152
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_677: # %cond.load565
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 143
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 142
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_678
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_153
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_678: # %cond.load569
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 144
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 143
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_679
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_154
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_679: # %cond.load573
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 145
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 144
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_680
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_155
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_680: # %cond.load577
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 146
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 145
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_681
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_156
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_681: # %cond.load581
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 147
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 146
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_682
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_157
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_682: # %cond.load585
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 148
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 147
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_683
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_158
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_683: # %cond.load589
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 149
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 148
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_684
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_159
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_684: # %cond.load593
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 150
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 149
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_685
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_160
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_685: # %cond.load597
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 151
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 150
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_686
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_161
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_686: # %cond.load601
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 152
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 151
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_687
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_162
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_687: # %cond.load605
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 153
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 152
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_688
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_163
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_688: # %cond.load609
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 154
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 153
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_689
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_164
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_689: # %cond.load613
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 155
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 154
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_690
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_165
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_690: # %cond.load617
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 156
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 155
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_691
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_166
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_691: # %cond.load621
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 157
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 156
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1029
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_167
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1029: # %cond.load621
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_168
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_692: # %cond.load633
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 160
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 159
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_693
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_172
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_693: # %cond.load637
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 161
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 160
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_694
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_173
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_694: # %cond.load641
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 162
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 161
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_695
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_174
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_695: # %cond.load645
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 163
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 162
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_696
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_175
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_696: # %cond.load649
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 164
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 163
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_697
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_176
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_697: # %cond.load653
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 165
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 164
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_698
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_177
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_698: # %cond.load657
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 166
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 165
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_699
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_178
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_699: # %cond.load661
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 167
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 166
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_700
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_179
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_700: # %cond.load665
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 168
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 167
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_701
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_180
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_701: # %cond.load669
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 169
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 168
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_702
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_181
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_702: # %cond.load673
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 170
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 169
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_703
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_182
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_703: # %cond.load677
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 171
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 170
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_704
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_183
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_704: # %cond.load681
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 172
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 171
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_705
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_184
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_705: # %cond.load685
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 173
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 172
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_706
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_185
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_706: # %cond.load689
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 174
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 173
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_707
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_186
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_707: # %cond.load693
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 175
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 174
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_708
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_187
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_708: # %cond.load697
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 176
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 175
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_709
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_188
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_709: # %cond.load701
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 177
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 176
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_710
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_189
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_710: # %cond.load705
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 178
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 177
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_711
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_190
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_711: # %cond.load709
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 179
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 178
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_712
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_191
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_712: # %cond.load713
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 180
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 179
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_713
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_192
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_713: # %cond.load717
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 181
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 180
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_714
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_193
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_714: # %cond.load721
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 182
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 181
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_715
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_194
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_715: # %cond.load725
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 183
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 182
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_716
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_195
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_716: # %cond.load729
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 184
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 183
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_717
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_196
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_717: # %cond.load733
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 185
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 184
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_718
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_197
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_718: # %cond.load737
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 186
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 185
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_719
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_198
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_719: # %cond.load741
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 187
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 186
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_720
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_199
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_720: # %cond.load745
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 188
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 187
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_721
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_200
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_721: # %cond.load749
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 189
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 188
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1030
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_201
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1030: # %cond.load749
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_202
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_722: # %cond.load761
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 192
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 191
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_723
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_206
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_723: # %cond.load765
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 193
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 192
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_724
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_207
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_724: # %cond.load769
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 194
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 193
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_725
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_208
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_725: # %cond.load773
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 195
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 194
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_726
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_209
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_726: # %cond.load777
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 196
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 195
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_727
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_210
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_727: # %cond.load781
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 197
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 196
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_728
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_211
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_728: # %cond.load785
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 198
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 197
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_729
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_212
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_729: # %cond.load789
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 199
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 198
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_730
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_213
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_730: # %cond.load793
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 200
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 199
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_731
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_214
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_731: # %cond.load797
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 201
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 200
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_732
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_215
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_732: # %cond.load801
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 202
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 201
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_733
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_216
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_733: # %cond.load805
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 203
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 202
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_734
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_217
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_734: # %cond.load809
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 204
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 203
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_735
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_218
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_735: # %cond.load813
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 205
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 204
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_736
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_219
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_736: # %cond.load817
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 206
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 205
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_737
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_220
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_737: # %cond.load821
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 207
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 206
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_738
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_221
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_738: # %cond.load825
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 208
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 207
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_739
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_222
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_739: # %cond.load829
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 209
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 208
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_740
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_223
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_740: # %cond.load833
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 210
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 209
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_741
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_224
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_741: # %cond.load837
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 211
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 210
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_742
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_225
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_742: # %cond.load841
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 212
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 211
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_743
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_226
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_743: # %cond.load845
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 213
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 212
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_744
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_227
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_744: # %cond.load849
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 214
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 213
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_745
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_228
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_745: # %cond.load853
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 215
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 214
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_746
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_229
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_746: # %cond.load857
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 216
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 215
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_747
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_230
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_747: # %cond.load861
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 217
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 216
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_748
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_231
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_748: # %cond.load865
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 218
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 217
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_749
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_232
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_749: # %cond.load869
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 219
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 218
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_750
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_233
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_750: # %cond.load873
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 220
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 219
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_751
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_234
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_751: # %cond.load877
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 221
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 220
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1031
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_235
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1031: # %cond.load877
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_236
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_752: # %cond.load889
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 224
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 223
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_753
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_240
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_753: # %cond.load893
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 225
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 224
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_754
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_241
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_754: # %cond.load897
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 226
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 225
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_755
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_242
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_755: # %cond.load901
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 227
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 226
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_756
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_243
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_756: # %cond.load905
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 228
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 227
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_757
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_244
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_757: # %cond.load909
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 229
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 228
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_758
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_245
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_758: # %cond.load913
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 230
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 229
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_759
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_246
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_759: # %cond.load917
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 231
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 230
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_760
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_247
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_760: # %cond.load921
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 232
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 231
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_761
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_248
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_761: # %cond.load925
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 233
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 232
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_762
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_249
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_762: # %cond.load929
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 234
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 233
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_763
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_250
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_763: # %cond.load933
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 235
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 234
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_764
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_251
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_764: # %cond.load937
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 236
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 235
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_765
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_252
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_765: # %cond.load941
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 237
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 236
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_766
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_253
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_766: # %cond.load945
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 238
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 237
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_767
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_254
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_767: # %cond.load949
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 239
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 238
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_768
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_255
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_768: # %cond.load953
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 240
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 239
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_769
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_256
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_769: # %cond.load957
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 241
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 240
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_770
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_257
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_770: # %cond.load961
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 242
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 241
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_771
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_258
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_771: # %cond.load965
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 243
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 242
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_772
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_259
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_772: # %cond.load969
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 244
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 243
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_773
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_260
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_773: # %cond.load973
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 245
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 244
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_774
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_261
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_774: # %cond.load977
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 246
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 245
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_775
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_262
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_775: # %cond.load981
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 247
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 246
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_776
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_263
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_776: # %cond.load985
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 248
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 247
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_777
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_264
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_777: # %cond.load989
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 249
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 248
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_778
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_265
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_778: # %cond.load993
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 250
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 249
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_779
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_266
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_779: # %cond.load997
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 251
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 250
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_780
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_267
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_780: # %cond.load1001
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 252
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 251
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_781
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_268
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_781: # %cond.load1005
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 253
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 252
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1032
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_269
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1032: # %cond.load1005
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_270
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_782: # %cond.load1017
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 255
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_783
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_274
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_783: # %cond.load1021
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 257
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 256
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_784
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_275
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_784: # %cond.load1025
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 258
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 257
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_785
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_276
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_785: # %cond.load1029
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 259
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 258
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_786
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_277
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_786: # %cond.load1033
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 260
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 259
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_787
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_278
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_787: # %cond.load1037
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 261
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 260
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_788
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_279
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_788: # %cond.load1041
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 262
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 261
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_789
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_280
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_789: # %cond.load1045
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 263
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 262
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_790
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_281
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_790: # %cond.load1049
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 264
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 263
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_791
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_282
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_791: # %cond.load1053
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 265
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 264
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_792
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_283
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_792: # %cond.load1057
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 266
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 265
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_793
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_284
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_793: # %cond.load1061
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 267
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 266
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_794
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_285
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_794: # %cond.load1065
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 268
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 267
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_795
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_286
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_795: # %cond.load1069
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 269
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 268
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_796
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_287
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_796: # %cond.load1073
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 270
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 269
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_797
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_288
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_797: # %cond.load1077
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 271
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 270
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_798
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_289
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_798: # %cond.load1081
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 272
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 271
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_799
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_290
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_799: # %cond.load1085
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 273
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 272
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_800
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_291
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_800: # %cond.load1089
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 274
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 273
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_801
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_292
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_801: # %cond.load1093
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 275
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 274
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_802
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_293
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_802: # %cond.load1097
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 276
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 275
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_803
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_294
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_803: # %cond.load1101
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 277
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 276
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_804
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_295
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_804: # %cond.load1105
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 278
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 277
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_805
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_296
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_805: # %cond.load1109
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 279
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 278
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_806
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_297
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_806: # %cond.load1113
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 280
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 279
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_807
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_298
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_807: # %cond.load1117
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 281
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 280
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_808
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_299
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_808: # %cond.load1121
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 282
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 281
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_809
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_300
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_809: # %cond.load1125
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 283
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 282
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_810
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_301
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_810: # %cond.load1129
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 284
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 283
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_811
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_302
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_811: # %cond.load1133
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 285
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 284
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1033
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_303
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1033: # %cond.load1133
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_304
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_812: # %cond.load1145
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 288
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 287
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_813
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_308
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_813: # %cond.load1149
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 289
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 288
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_814
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_309
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_814: # %cond.load1153
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 290
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 289
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_815
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_310
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_815: # %cond.load1157
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 291
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 290
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_816
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_311
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_816: # %cond.load1161
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 292
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 291
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_817
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_312
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_817: # %cond.load1165
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 293
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 292
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_818
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_313
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_818: # %cond.load1169
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 294
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 293
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_819
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_314
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_819: # %cond.load1173
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 295
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 294
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_820
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_315
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_820: # %cond.load1177
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 296
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 295
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_821
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_316
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_821: # %cond.load1181
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 297
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 296
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_822
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_317
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_822: # %cond.load1185
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 298
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 297
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_823
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_318
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_823: # %cond.load1189
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 299
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 298
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_824
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_319
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_824: # %cond.load1193
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 300
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 299
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_825
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_320
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_825: # %cond.load1197
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 301
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 300
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_826
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_321
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_826: # %cond.load1201
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 302
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 301
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_827
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_322
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_827: # %cond.load1205
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 303
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 302
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_828
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_323
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_828: # %cond.load1209
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 304
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 303
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_829
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_324
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_829: # %cond.load1213
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 305
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 304
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_830
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_325
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_830: # %cond.load1217
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 306
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 305
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_831
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_326
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_831: # %cond.load1221
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 307
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 306
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_832
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_327
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_832: # %cond.load1225
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 308
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 307
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_833
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_328
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_833: # %cond.load1229
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 309
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 308
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_834
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_329
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_834: # %cond.load1233
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 310
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 309
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_835
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_330
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_835: # %cond.load1237
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 311
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 310
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_836
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_331
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_836: # %cond.load1241
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 312
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 311
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_837
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_332
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_837: # %cond.load1245
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 313
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 312
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_838
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_333
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_838: # %cond.load1249
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 314
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 313
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_839
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_334
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_839: # %cond.load1253
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 315
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 314
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_840
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_335
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_840: # %cond.load1257
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 316
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 315
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_841
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_336
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_841: # %cond.load1261
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 317
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 316
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1034
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_337
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1034: # %cond.load1261
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_338
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_842: # %cond.load1273
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 320
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 319
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_843
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_342
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_843: # %cond.load1277
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 321
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 320
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_844
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_343
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_844: # %cond.load1281
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 322
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 321
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_845
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_344
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_845: # %cond.load1285
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 323
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 322
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_846
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_345
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_846: # %cond.load1289
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 324
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 323
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_847
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_346
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_847: # %cond.load1293
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 325
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 324
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_848
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_347
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_848: # %cond.load1297
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 326
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 325
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_849
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_348
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_849: # %cond.load1301
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 327
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 326
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_850
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_349
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_850: # %cond.load1305
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 328
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 327
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_851
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_350
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_851: # %cond.load1309
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 329
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 328
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_852
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_351
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_852: # %cond.load1313
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 330
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 329
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_853
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_352
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_853: # %cond.load1317
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 331
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 330
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_854
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_353
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_854: # %cond.load1321
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 332
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 331
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_855
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_354
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_855: # %cond.load1325
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 333
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 332
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_856
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_355
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_856: # %cond.load1329
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 334
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 333
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_857
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_356
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_857: # %cond.load1333
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 335
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 334
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_858
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_357
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_858: # %cond.load1337
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 336
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 335
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_859
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_358
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_859: # %cond.load1341
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 337
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 336
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_860
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_359
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_860: # %cond.load1345
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 338
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 337
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_861
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_360
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_861: # %cond.load1349
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 339
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 338
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_862
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_361
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_862: # %cond.load1353
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 340
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 339
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_863
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_362
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_863: # %cond.load1357
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 341
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 340
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_864
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_363
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_864: # %cond.load1361
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 342
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 341
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_865
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_364
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_865: # %cond.load1365
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 343
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 342
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_866
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_365
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_866: # %cond.load1369
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 344
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 343
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_867
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_366
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_867: # %cond.load1373
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 345
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 344
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_868
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_367
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_868: # %cond.load1377
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 346
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 345
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_869
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_368
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_869: # %cond.load1381
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 347
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 346
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_870
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_369
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_870: # %cond.load1385
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 348
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 347
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_871
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_370
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_871: # %cond.load1389
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 349
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 348
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1035
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_371
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1035: # %cond.load1389
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_372
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_872: # %cond.load1401
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 352
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 351
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_873
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_376
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_873: # %cond.load1405
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 353
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 352
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_874
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_377
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_874: # %cond.load1409
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 354
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 353
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_875
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_378
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_875: # %cond.load1413
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 355
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 354
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_876
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_379
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_876: # %cond.load1417
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 356
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 355
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_877
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_380
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_877: # %cond.load1421
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 357
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 356
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_878
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_381
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_878: # %cond.load1425
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 358
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 357
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_879
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_382
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_879: # %cond.load1429
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 359
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 358
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_880
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_383
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_880: # %cond.load1433
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 360
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 359
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_881
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_384
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_881: # %cond.load1437
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 361
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 360
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_882
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_385
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_882: # %cond.load1441
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 362
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 361
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_883
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_386
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_883: # %cond.load1445
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 363
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 362
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_884
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_387
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_884: # %cond.load1449
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 364
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 363
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_885
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_388
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_885: # %cond.load1453
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 365
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 364
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_886
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_389
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_886: # %cond.load1457
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 366
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 365
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_887
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_390
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_887: # %cond.load1461
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 367
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 366
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_888
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_391
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_888: # %cond.load1465
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 368
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 367
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_889
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_392
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_889: # %cond.load1469
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 369
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 368
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_890
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_393
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_890: # %cond.load1473
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 370
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 369
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_891
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_394
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_891: # %cond.load1477
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 371
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 370
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_892
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_395
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_892: # %cond.load1481
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 372
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 371
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_893
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_396
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_893: # %cond.load1485
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 373
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 372
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_894
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_397
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_894: # %cond.load1489
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 374
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 373
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_895
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_398
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_895: # %cond.load1493
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 375
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 374
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_896
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_399
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_896: # %cond.load1497
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 376
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 375
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_897
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_400
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_897: # %cond.load1501
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 377
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 376
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_898
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_401
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_898: # %cond.load1505
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 378
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 377
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_899
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_402
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_899: # %cond.load1509
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 379
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 378
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_900
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_403
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_900: # %cond.load1513
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 380
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 379
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_901
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_404
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_901: # %cond.load1517
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 381
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 380
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1036
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_405
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1036: # %cond.load1517
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_406
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_902: # %cond.load1529
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 384
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 383
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_903
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_410
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_903: # %cond.load1533
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 385
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 384
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_904
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_411
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_904: # %cond.load1537
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 386
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 385
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_905
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_412
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_905: # %cond.load1541
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 387
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 386
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_906
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_413
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_906: # %cond.load1545
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 388
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 387
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_907
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_414
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_907: # %cond.load1549
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 389
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 388
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_908
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_415
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_908: # %cond.load1553
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 390
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 389
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_909
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_416
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_909: # %cond.load1557
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 391
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 390
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_910
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_417
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_910: # %cond.load1561
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 392
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 391
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_911
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_418
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_911: # %cond.load1565
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 393
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 392
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_912
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_419
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_912: # %cond.load1569
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 394
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 393
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_913
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_420
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_913: # %cond.load1573
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 395
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 394
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_914
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_421
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_914: # %cond.load1577
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 396
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 395
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_915
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_422
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_915: # %cond.load1581
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 397
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 396
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_916
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_423
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_916: # %cond.load1585
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 398
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 397
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_917
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_424
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_917: # %cond.load1589
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 399
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 398
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_918
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_425
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_918: # %cond.load1593
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 400
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 399
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_919
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_426
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_919: # %cond.load1597
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 401
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 400
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_920
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_427
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_920: # %cond.load1601
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 402
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 401
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_921
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_428
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_921: # %cond.load1605
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 403
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 402
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_922
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_429
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_922: # %cond.load1609
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 404
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 403
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_923
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_430
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_923: # %cond.load1613
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 405
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 404
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_924
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_431
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_924: # %cond.load1617
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 406
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 405
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_925
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_432
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_925: # %cond.load1621
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 407
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 406
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_926
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_433
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_926: # %cond.load1625
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 408
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 407
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_927
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_434
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_927: # %cond.load1629
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 409
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 408
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_928
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_435
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_928: # %cond.load1633
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 410
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 409
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_929
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_436
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_929: # %cond.load1637
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 411
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 410
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_930
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_437
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_930: # %cond.load1641
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 412
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 411
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_931
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_438
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_931: # %cond.load1645
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 413
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 412
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1037
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_439
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1037: # %cond.load1645
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_440
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_932: # %cond.load1657
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 416
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 415
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_933
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_444
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_933: # %cond.load1661
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 417
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 416
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_934
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_445
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_934: # %cond.load1665
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 418
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 417
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_935
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_446
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_935: # %cond.load1669
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 419
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 418
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_936
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_447
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_936: # %cond.load1673
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 420
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 419
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_937
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_448
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_937: # %cond.load1677
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 421
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 420
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_938
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_449
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_938: # %cond.load1681
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 422
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 421
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_939
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_450
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_939: # %cond.load1685
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 423
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 422
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_940
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_451
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_940: # %cond.load1689
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 424
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 423
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_941
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_452
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_941: # %cond.load1693
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 425
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 424
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_942
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_453
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_942: # %cond.load1697
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 426
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 425
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_943
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_454
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_943: # %cond.load1701
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 427
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 426
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_944
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_455
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_944: # %cond.load1705
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 428
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 427
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_945
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_456
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_945: # %cond.load1709
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 429
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 428
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_946
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_457
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_946: # %cond.load1713
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 430
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 429
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_947
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_458
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_947: # %cond.load1717
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 431
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 430
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_948
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_459
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_948: # %cond.load1721
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 432
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 431
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_949
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_460
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_949: # %cond.load1725
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 433
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 432
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_950
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_461
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_950: # %cond.load1729
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 434
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 433
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_951
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_462
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_951: # %cond.load1733
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 435
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 434
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_952
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_463
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_952: # %cond.load1737
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 436
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 435
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_953
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_464
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_953: # %cond.load1741
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 437
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 436
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_954
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_465
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_954: # %cond.load1745
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 438
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 437
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_955
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_466
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_955: # %cond.load1749
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 439
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 438
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_956
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_467
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_956: # %cond.load1753
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 440
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 439
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_957
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_468
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_957: # %cond.load1757
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 441
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 440
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_958
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_469
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_958: # %cond.load1761
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 442
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 441
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_959
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_470
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_959: # %cond.load1765
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 443
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 442
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_960
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_471
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_960: # %cond.load1769
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 444
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 443
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_961
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_472
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_961: # %cond.load1773
+; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 445
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 444
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1038
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_473
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1038: # %cond.load1773
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_474
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_962: # %cond.load1785
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 448
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 447
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_963
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_478
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_963: # %cond.load1789
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 449
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 448
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_964
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_479
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_964: # %cond.load1793
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 450
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 449
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_965
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_480
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_965: # %cond.load1797
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 451
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 450
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_966
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_481
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_966: # %cond.load1801
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 452
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 451
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_967
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_482
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_967: # %cond.load1805
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 453
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 452
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_968
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_483
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_968: # %cond.load1809
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 454
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 453
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_969
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_484
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_969: # %cond.load1813
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 455
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 454
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_970
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_485
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_970: # %cond.load1817
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 456
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 455
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_971
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_486
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_971: # %cond.load1821
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 457
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 456
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_972
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_487
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_972: # %cond.load1825
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 458
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 457
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_973
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_488
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_973: # %cond.load1829
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 459
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 458
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_974
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_489
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_974: # %cond.load1833
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 460
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 459
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_975
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_490
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_975: # %cond.load1837
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 461
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 460
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_976
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_491
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_976: # %cond.load1841
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 462
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 461
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_977
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_492
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_977: # %cond.load1845
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 463
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 462
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_978
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_493
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_978: # %cond.load1849
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 464
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 463
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_979
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_494
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_979: # %cond.load1853
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 465
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 464
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_980
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_495
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_980: # %cond.load1857
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 466
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 465
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_981
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_496
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_981: # %cond.load1861
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 467
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 466
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_982
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_497
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_982: # %cond.load1865
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 468
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 467
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_983
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_498
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_983: # %cond.load1869
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 469
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 468
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_984
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_499
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_984: # %cond.load1873
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 470
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 469
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_985
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_500
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_985: # %cond.load1877
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 471
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 470
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_986
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_501
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_986: # %cond.load1881
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 472
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 471
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_987
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_502
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_987: # %cond.load1885
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 473
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 472
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_988
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_503
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_988: # %cond.load1889
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 474
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 473
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_989
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_504
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_989: # %cond.load1893
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 475
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 474
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_990
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_505
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_990: # %cond.load1897
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 476
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 475
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_991
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_506
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_991: # %cond.load1901
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 477
+; CHECK-VRGATHER-RV32-NEXT:    li a4, 476
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
+; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1039
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_507
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1039: # %cond.load1901
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_508
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 480
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 479
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_993
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_512
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 481
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 480
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_994
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_513
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 482
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 481
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_995
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_514
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 483
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 482
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_996
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_515
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 484
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 483
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_997
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_516
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 485
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 484
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_998
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_517
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 486
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 485
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_999
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_518
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 487
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 486
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1000
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_519
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 488
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 487
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1001
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_520
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 489
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 488
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1002
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_521
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 490
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 489
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1003
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_522
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 491
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 490
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1004
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_523
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 492
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 491
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1005
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_524
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 493
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 492
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1006
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_525
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 494
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 493
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1007
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_526
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 495
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 494
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1008
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_527
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 496
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 495
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1009
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_528
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 497
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 496
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1010
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_529
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 498
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 497
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1011
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_530
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 499
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 498
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1012
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_531
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 500
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 499
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1013
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_532
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 501
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 500
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1014
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_533
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 502
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 501
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1015
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_534
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 503
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 502
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1016
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_535
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 504
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 503
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1017
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_536
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 505
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 504
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1018
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_537
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 506
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 505
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1019
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_538
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 507
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 506
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1020
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_539
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 508
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 507
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1021
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_540
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 509
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 508
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1022
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_541
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 510
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 509
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1023
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_542
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV32-NEXT:    li a2, 511
+; CHECK-VRGATHER-RV32-NEXT:    li a3, 510
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_1024
+; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_543
+; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-VRGATHER-RV32-NEXT:    lbu a0, 0(a0)
+; CHECK-VRGATHER-RV32-NEXT:    li a1, 512
+; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a0
+; CHECK-VRGATHER-RV32-NEXT:    li a0, 511
+; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-VRGATHER-RV32-NEXT:    ret
+;
+; CHECK-VRGATHER-RV64-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-VRGATHER-RV64:       # %bb.0:
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v0
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_1
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_527
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1: # %else
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_2
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_528
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_2: # %else2
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_3
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_529
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_3: # %else6
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_4
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_530
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_4: # %else10
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_5
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_531
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_5: # %else14
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_6
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_532
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_6: # %else18
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_7
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_533
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_7: # %else22
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_8
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_534
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_8: # %else26
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_9
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_535
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_9: # %else30
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_10
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_536
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_10: # %else34
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_11
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_537
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_11: # %else38
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_12
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_538
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_12: # %else42
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_13
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_539
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_13: # %else46
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_14
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_540
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_14: # %else50
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_15
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_541
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_15: # %else54
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_16
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_542
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_16: # %else58
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_17
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_543
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_17: # %else62
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_18
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_544
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_18: # %else66
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_19
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_545
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_19: # %else70
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_20
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_546
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_20: # %else74
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_21
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_547
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_21: # %else78
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_22
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_548
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_22: # %else82
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_23
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_549
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_23: # %else86
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_24
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_550
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_24: # %else90
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_25
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_551
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_25: # %else94
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_26
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_552
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_26: # %else98
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_27
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_553
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_27: # %else102
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_28
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_554
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_28: # %else106
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_29
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_555
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_29: # %else110
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_30
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_556
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_30: # %else114
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_31
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_557
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_31: # %else118
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_32
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_558
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_32: # %else122
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_33
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_559
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_33: # %else126
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_34
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_560
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_34: # %else130
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_35
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_561
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_35: # %else134
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_36
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_562
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_36: # %else138
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_37
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_563
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_37: # %else142
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_38
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_564
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_38: # %else146
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_39
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_565
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_39: # %else150
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_40
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_566
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_40: # %else154
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_41
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_567
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_41: # %else158
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_42
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_568
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_42: # %else162
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_43
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_569
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_43: # %else166
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_44
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_570
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_44: # %else170
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_45
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_571
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_45: # %else174
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_46
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_572
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_46: # %else178
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_47
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_573
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_47: # %else182
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_48
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_574
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_48: # %else186
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_49
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_575
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_49: # %else190
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_50
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_576
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_50: # %else194
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_51
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_577
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_51: # %else198
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_52
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_578
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_52: # %else202
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_53
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_579
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_53: # %else206
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_54
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_580
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_54: # %else210
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_55
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_581
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_55: # %else214
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_56
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_582
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_56: # %else218
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_57
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_583
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_57: # %else222
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_58
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_584
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_58: # %else226
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_59
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_585
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_59: # %else230
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_60
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_586
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_60: # %else234
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_61
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_587
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_61: # %else238
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_63
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_62: # %cond.load241
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 62
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 61
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_63: # %else242
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_65
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.64: # %cond.load245
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v17, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 63
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 62
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v17, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_65: # %else246
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_66
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_588
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_66: # %else250
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_67
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_589
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_67: # %else254
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_68
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_590
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_68: # %else258
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_69
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_591
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_69: # %else262
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_70
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_592
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_70: # %else266
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_71
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_593
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_71: # %else270
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_72
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_594
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_72: # %else274
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_73
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_595
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_73: # %else278
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_74
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_596
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_74: # %else282
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_75
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_597
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_75: # %else286
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_76
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_598
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_76: # %else290
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_77
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_599
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_77: # %else294
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_78
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_600
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_78: # %else298
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_79
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_601
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_79: # %else302
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_80
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_602
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_80: # %else306
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_81
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_603
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_81: # %else310
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_82
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_604
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_82: # %else314
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_83
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_605
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_83: # %else318
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_84
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_606
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_84: # %else322
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_85
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_607
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_85: # %else326
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_86
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_608
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_86: # %else330
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_87
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_609
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_87: # %else334
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_88
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_610
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_88: # %else338
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_89
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_611
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_89: # %else342
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_90
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_612
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_90: # %else346
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_91
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_613
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_91: # %else350
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_92
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_614
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_92: # %else354
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_93
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_615
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_93: # %else358
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_94
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_616
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_94: # %else362
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_95
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_617
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_95: # %else366
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_96
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_618
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_96: # %else370
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_97
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_619
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_97: # %else374
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_98
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_620
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_98: # %else378
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_99
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_621
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_99: # %else382
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_100
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_622
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_100: # %else386
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_101
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_623
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_101: # %else390
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_102
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_624
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_102: # %else394
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_103
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_625
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_103: # %else398
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_104
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_626
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_104: # %else402
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_105
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_627
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_105: # %else406
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_106
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_628
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_106: # %else410
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_107
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_629
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_107: # %else414
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_108
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_630
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_108: # %else418
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_109
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_631
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_109: # %else422
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_110
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_632
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_110: # %else426
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_111
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_633
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_111: # %else430
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_112
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_634
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_112: # %else434
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_113
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_635
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_113: # %else438
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_114
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_636
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_114: # %else442
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_115
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_637
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_115: # %else446
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_116
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_638
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_116: # %else450
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_117
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_639
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_117: # %else454
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_118
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_640
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_118: # %else458
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_119
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_641
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_119: # %else462
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_120
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_642
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_120: # %else466
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_121
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_643
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_121: # %else470
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_122
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_644
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_122: # %else474
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_123
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_645
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_123: # %else478
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_124
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_646
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_124: # %else482
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_125
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_647
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_125: # %else486
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_126
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_648
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_126: # %else490
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_127
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_649
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_127: # %else494
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_129
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_128: # %cond.load497
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 126
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 125
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_129: # %else498
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_131
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.130: # %cond.load501
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v18, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 127
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 126
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v18, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_131: # %else502
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_132
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_650
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_132: # %else506
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_133
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_651
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_133: # %else510
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_134
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_652
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_134: # %else514
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_135
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_653
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_135: # %else518
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_136
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_654
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_136: # %else522
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_137
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_655
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_137: # %else526
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_138
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_656
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_138: # %else530
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_139
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_657
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_139: # %else534
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_140
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_658
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_140: # %else538
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_141
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_659
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_141: # %else542
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_142
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_660
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_142: # %else546
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_143
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_661
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_143: # %else550
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_144
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_662
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_144: # %else554
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_145
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_663
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_145: # %else558
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_146
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_664
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_146: # %else562
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_147
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_665
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_147: # %else566
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_148
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_666
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_148: # %else570
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_149
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_667
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_149: # %else574
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_150
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_668
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_150: # %else578
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_151
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_669
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_151: # %else582
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_152
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_670
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_152: # %else586
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_153
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_671
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_153: # %else590
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_154
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_672
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_154: # %else594
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_155
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_673
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_155: # %else598
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_156
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_674
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_156: # %else602
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_157
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_675
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_157: # %else606
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_158
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_676
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_158: # %else610
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_159
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_677
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_159: # %else614
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_160
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_678
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_160: # %else618
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_161
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_679
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_161: # %else622
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_162
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_680
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_162: # %else626
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_163
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_681
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_163: # %else630
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_164
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_682
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_164: # %else634
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_165
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_683
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_165: # %else638
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_166
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_684
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_166: # %else642
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_167
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_685
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_167: # %else646
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_168
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_686
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_168: # %else650
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_169
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_687
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_169: # %else654
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_170
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_688
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_170: # %else658
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_171
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_689
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_171: # %else662
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_172
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_690
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_172: # %else666
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_173
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_691
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_173: # %else670
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_174
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_692
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_174: # %else674
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_175
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_693
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_175: # %else678
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_176
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_694
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_176: # %else682
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_177
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_695
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_177: # %else686
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_178
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_696
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_178: # %else690
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_179
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_697
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_179: # %else694
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_180
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_698
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_180: # %else698
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_181
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_699
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_181: # %else702
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_182
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_700
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_182: # %else706
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_183
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_701
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_183: # %else710
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_184
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_702
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_184: # %else714
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_185
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_703
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_185: # %else718
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_186
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_704
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_186: # %else722
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_187
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_705
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_187: # %else726
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_188
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_706
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_188: # %else730
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_189
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_707
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_189: # %else734
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_190
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_708
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_190: # %else738
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_191
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_709
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_191: # %else742
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_192
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_710
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_192: # %else746
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_193
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_711
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_193: # %else750
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_195
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_194: # %cond.load753
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 190
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 189
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_195: # %else754
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_197
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.196: # %cond.load757
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v20, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 191
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 190
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_197: # %else758
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_198
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_712
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_198: # %else762
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_199
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_713
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_199: # %else766
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_200
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_714
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_200: # %else770
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_201
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_715
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_201: # %else774
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_202
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_716
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_202: # %else778
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_203
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_717
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_203: # %else782
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_204
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_718
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_204: # %else786
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_205
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_719
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_205: # %else790
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_206
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_720
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_206: # %else794
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_207
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_721
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_207: # %else798
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_208
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_722
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_208: # %else802
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_209
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_723
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_209: # %else806
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_210
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_724
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_210: # %else810
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_211
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_725
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_211: # %else814
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_212
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_726
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_212: # %else818
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_213
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_727
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_213: # %else822
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_214
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_728
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_214: # %else826
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_215
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_729
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_215: # %else830
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_216
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_730
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_216: # %else834
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_217
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_731
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_217: # %else838
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_218
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_732
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_218: # %else842
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_219
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_733
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_219: # %else846
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_220
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_734
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_220: # %else850
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_221
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_735
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_221: # %else854
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_222
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_736
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_222: # %else858
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_223
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_737
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_223: # %else862
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_224
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_738
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_224: # %else866
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_225
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_739
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_225: # %else870
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_226
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_740
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_226: # %else874
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_227
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_741
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_227: # %else878
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_228
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_742
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_228: # %else882
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_229
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_743
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_229: # %else886
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_230
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_744
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_230: # %else890
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_231
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_745
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_231: # %else894
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_232
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_746
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_232: # %else898
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_233
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_747
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_233: # %else902
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_234
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_748
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_234: # %else906
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_235
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_749
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_235: # %else910
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_236
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_750
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_236: # %else914
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_237
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_751
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_237: # %else918
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_238
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_752
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_238: # %else922
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_239
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_753
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_239: # %else926
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_240
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_754
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_240: # %else930
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_241
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_755
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_241: # %else934
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_242
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_756
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_242: # %else938
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_243
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_757
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_243: # %else942
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_244
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_758
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_244: # %else946
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_245
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_759
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_245: # %else950
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_246
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_760
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_246: # %else954
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_247
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_761
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_247: # %else958
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_248
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_762
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_248: # %else962
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_249
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_763
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_249: # %else966
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_250
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_764
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_250: # %else970
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_251
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_765
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_251: # %else974
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_252
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_766
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_252: # %else978
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_253
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_767
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_253: # %else982
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_254
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_768
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_254: # %else986
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_255
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_769
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_255: # %else990
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_256
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_770
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_256: # %else994
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_257
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_771
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_257: # %else998
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_258
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_772
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_258: # %else1002
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_259
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_773
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_259: # %else1006
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_261
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_260: # %cond.load1009
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 254
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 253
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_261: # %else1010
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_263
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.262: # %cond.load1013
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v20, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 255
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 254
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_263: # %else1014
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_264
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_774
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_264: # %else1018
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_265
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_775
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_265: # %else1022
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_266
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_776
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_266: # %else1026
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_267
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_777
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_267: # %else1030
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_268
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_778
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_268: # %else1034
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_269
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_779
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_269: # %else1038
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_270
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_780
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_270: # %else1042
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_271
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_781
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_271: # %else1046
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_272
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_782
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_272: # %else1050
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_273
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_783
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_273: # %else1054
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_274
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_784
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_274: # %else1058
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_275
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_785
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_275: # %else1062
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_276
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_786
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_276: # %else1066
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_277
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_787
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_277: # %else1070
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_278
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_788
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_278: # %else1074
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_279
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_789
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_279: # %else1078
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_280
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_790
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_280: # %else1082
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_281
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_791
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_281: # %else1086
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_282
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_792
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_282: # %else1090
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_283
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_793
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_283: # %else1094
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_284
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_794
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_284: # %else1098
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_285
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_795
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_285: # %else1102
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_286
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_796
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_286: # %else1106
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_287
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_797
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_287: # %else1110
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_288
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_798
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_288: # %else1114
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_289
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_799
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_289: # %else1118
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_290
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_800
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_290: # %else1122
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_291
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_801
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_291: # %else1126
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_292
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_802
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_292: # %else1130
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_293
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_803
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_293: # %else1134
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_294
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_804
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_294: # %else1138
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_295
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_805
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_295: # %else1142
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_296
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_806
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_296: # %else1146
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_297
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_807
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_297: # %else1150
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_298
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_808
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_298: # %else1154
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_299
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_809
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_299: # %else1158
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_300
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_810
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_300: # %else1162
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_301
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_811
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_301: # %else1166
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_302
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_812
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_302: # %else1170
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_303
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_813
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_303: # %else1174
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_304
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_814
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_304: # %else1178
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_305
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_815
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_305: # %else1182
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_306
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_816
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_306: # %else1186
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_307
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_817
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_307: # %else1190
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_308
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_818
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_308: # %else1194
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_309
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_819
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_309: # %else1198
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_310
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_820
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_310: # %else1202
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_311
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_821
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_311: # %else1206
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_312
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_822
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_312: # %else1210
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_313
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_823
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_313: # %else1214
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_314
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_824
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_314: # %else1218
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_315
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_825
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_315: # %else1222
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_316
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_826
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_316: # %else1226
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_317
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_827
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_317: # %else1230
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_318
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_828
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_318: # %else1234
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_319
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_829
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_319: # %else1238
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_320
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_830
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_320: # %else1242
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_321
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_831
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_321: # %else1246
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_322
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_832
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_322: # %else1250
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_323
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_833
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_323: # %else1254
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_324
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_834
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_324: # %else1258
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_325
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_835
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_325: # %else1262
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_327
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_326: # %cond.load1265
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 318
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 317
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_327: # %else1266
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_329
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.328: # %cond.load1269
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 319
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 318
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_329: # %else1270
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_330
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_836
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_330: # %else1274
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_331
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_837
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_331: # %else1278
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_332
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_838
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_332: # %else1282
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_333
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_839
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_333: # %else1286
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_334
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_840
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_334: # %else1290
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_335
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_841
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_335: # %else1294
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_336
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_842
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_336: # %else1298
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_337
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_843
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_337: # %else1302
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_338
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_844
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_338: # %else1306
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_339
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_845
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_339: # %else1310
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_340
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_846
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_340: # %else1314
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_341
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_847
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_341: # %else1318
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_342
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_848
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_342: # %else1322
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_343
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_849
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_343: # %else1326
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_344
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_850
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_344: # %else1330
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_345
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_851
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_345: # %else1334
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_346
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_852
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_346: # %else1338
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_347
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_853
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_347: # %else1342
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_348
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_854
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_348: # %else1346
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_349
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_855
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_349: # %else1350
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_350
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_856
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_350: # %else1354
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_351
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_857
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_351: # %else1358
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_352
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_858
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_352: # %else1362
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_353
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_859
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_353: # %else1366
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_354
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_860
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_354: # %else1370
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_355
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_861
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_355: # %else1374
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_356
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_862
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_356: # %else1378
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_357
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_863
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_357: # %else1382
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_358
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_864
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_358: # %else1386
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_359
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_865
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_359: # %else1390
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_360
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_866
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_360: # %else1394
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_361
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_867
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_361: # %else1398
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_362
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_868
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_362: # %else1402
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_363
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_869
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_363: # %else1406
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_364
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_870
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_364: # %else1410
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_365
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_871
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_365: # %else1414
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_366
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_872
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_366: # %else1418
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_367
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_873
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_367: # %else1422
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_368
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_874
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_368: # %else1426
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_369
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_875
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_369: # %else1430
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_370
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_876
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_370: # %else1434
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_371
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_877
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_371: # %else1438
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_372
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_878
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_372: # %else1442
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_373
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_879
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_373: # %else1446
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_374
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_880
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_374: # %else1450
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_375
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_881
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_375: # %else1454
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_376
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_882
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_376: # %else1458
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_377
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_883
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_377: # %else1462
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_378
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_884
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_378: # %else1466
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_379
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_885
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_379: # %else1470
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_380
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_886
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_380: # %else1474
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_381
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_887
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_381: # %else1478
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_382
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_888
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_382: # %else1482
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_383
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_889
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_383: # %else1486
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_384
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_890
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_384: # %else1490
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_385
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_891
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_385: # %else1494
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_386
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_892
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_386: # %else1498
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_387
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_893
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_387: # %else1502
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_388
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_894
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_388: # %else1506
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_389
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_895
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_389: # %else1510
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_390
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_896
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_390: # %else1514
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_391
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_897
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_391: # %else1518
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_393
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_392: # %cond.load1521
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 382
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 381
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_393: # %else1522
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_395
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.394: # %cond.load1525
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 383
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 382
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_395: # %else1526
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_396
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_898
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_396: # %else1530
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_397
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_899
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_397: # %else1534
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_398
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_900
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_398: # %else1538
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_399
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_901
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_399: # %else1542
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_400
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_902
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_400: # %else1546
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_401
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_903
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_401: # %else1550
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_402
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_904
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_402: # %else1554
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_403
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_905
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_403: # %else1558
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_404
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_906
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_404: # %else1562
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_405
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_907
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_405: # %else1566
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_406
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_908
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_406: # %else1570
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_407
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_909
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_407: # %else1574
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_408
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_910
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_408: # %else1578
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_409
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_911
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_409: # %else1582
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_410
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_912
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_410: # %else1586
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_411
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_913
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_411: # %else1590
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_412
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_914
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_412: # %else1594
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_413
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_915
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_413: # %else1598
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_414
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_916
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_414: # %else1602
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_415
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_917
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_415: # %else1606
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_416
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_918
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_416: # %else1610
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_417
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_919
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_417: # %else1614
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_418
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_920
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_418: # %else1618
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_419
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_921
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_419: # %else1622
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_420
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_922
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_420: # %else1626
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_421
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_923
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_421: # %else1630
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_422
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_924
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_422: # %else1634
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_423
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_925
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_423: # %else1638
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_424
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_926
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_424: # %else1642
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_425
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_927
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_425: # %else1646
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_426
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_928
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_426: # %else1650
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_427
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_929
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_427: # %else1654
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_428
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_930
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_428: # %else1658
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_429
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_931
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_429: # %else1662
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_430
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_932
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_430: # %else1666
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_431
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_933
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_431: # %else1670
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_432
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_934
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_432: # %else1674
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_433
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_935
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_433: # %else1678
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_434
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_936
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_434: # %else1682
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_435
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_937
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_435: # %else1686
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_436
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_938
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_436: # %else1690
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_437
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_939
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_437: # %else1694
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_438
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_940
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_438: # %else1698
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_439
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_941
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_439: # %else1702
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_440
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_942
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_440: # %else1706
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_441
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_943
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_441: # %else1710
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_442
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_944
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_442: # %else1714
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_443
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_945
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_443: # %else1718
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_444
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_946
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_444: # %else1722
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_445
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_947
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_445: # %else1726
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_446
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_948
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_446: # %else1730
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_447
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_949
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_447: # %else1734
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_448
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_950
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_448: # %else1738
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_449
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_951
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_449: # %else1742
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_450
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_952
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_450: # %else1746
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_451
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_953
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_451: # %else1750
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_452
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_954
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_452: # %else1754
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_453
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_955
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_453: # %else1758
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_454
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_956
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_454: # %else1762
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_455
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_957
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_455: # %else1766
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_456
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_958
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_456: # %else1770
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_457
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_959
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_457: # %else1774
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_459
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_458: # %cond.load1777
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 446
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 445
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_459: # %else1778
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_461
+; CHECK-VRGATHER-RV64-NEXT:  # %bb.460: # %cond.load1781
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 447
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 446
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_461: # %else1782
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_462
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_960
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_462: # %else1786
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_463
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_961
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_463: # %else1790
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_464
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_962
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_464: # %else1794
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_465
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_963
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_465: # %else1798
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_466
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_964
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_466: # %else1802
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_467
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_965
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_467: # %else1806
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_468
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_966
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_468: # %else1810
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_469
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_967
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_469: # %else1814
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_470
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_968
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_470: # %else1818
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_471
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_969
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_471: # %else1822
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_472
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_970
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_472: # %else1826
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_473
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_971
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_473: # %else1830
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_474
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_972
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_474: # %else1834
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_475
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_973
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_475: # %else1838
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_476
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_974
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_476: # %else1842
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_477
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_975
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_477: # %else1846
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_478
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_976
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_478: # %else1850
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_479
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_977
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_479: # %else1854
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_480
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_978
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_480: # %else1858
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_481
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_979
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_481: # %else1862
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_482
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_980
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_482: # %else1866
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_483
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_981
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_483: # %else1870
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_484
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_982
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_484: # %else1874
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_485
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_983
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_485: # %else1878
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_486
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_984
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_486: # %else1882
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_487
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_985
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_487: # %else1886
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_488
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_986
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_488: # %else1890
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_489
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_987
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_489: # %else1894
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_490
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_988
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_490: # %else1898
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_491
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_989
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_491: # %else1902
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_492
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_990
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_492: # %else1906
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_493
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_991
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_493: # %else1910
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_494
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_992
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_494: # %else1914
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_495
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_993
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_495: # %else1918
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_496
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_994
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_496: # %else1922
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_497
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_995
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_497: # %else1926
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_498
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_996
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_498: # %else1930
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_499
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_997
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_499: # %else1934
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_500
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_998
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_500: # %else1938
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_501
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_999
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_501: # %else1942
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_502
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1000
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_502: # %else1946
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_503
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1001
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_503: # %else1950
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_504
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1002
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_504: # %else1954
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_505
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1003
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_505: # %else1958
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_506
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1004
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_506: # %else1962
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_507
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1005
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_507: # %else1966
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_508
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1006
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_508: # %else1970
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_509
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1007
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_509: # %else1974
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_510
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1008
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_510: # %else1978
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_511
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1009
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_511: # %else1982
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_512
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1010
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_512: # %else1986
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_513
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1011
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_513: # %else1990
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_514
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1012
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_514: # %else1994
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_515
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1013
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_515: # %else1998
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_516
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1014
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_516: # %else2002
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_517
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1015
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_517: # %else2006
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_518
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1016
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_518: # %else2010
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_519
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1017
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_519: # %else2014
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_520
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1018
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_520: # %else2018
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_521
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1019
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_521: # %else2022
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_522
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1020
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_522: # %else2026
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_523
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1021
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_523: # %else2030
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_524
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1022
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_524: # %else2034
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_525
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1023
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_525: # %else2038
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_526
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1024
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_526: # %else2042
+; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_527: # %cond.load
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v8, a1
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_528
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_2
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_528: # %cond.load1
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_529
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_3
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_529: # %cond.load5
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_530
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_4
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_530: # %cond.load9
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_531
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_5
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_531: # %cond.load13
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_532
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_6
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_532: # %cond.load17
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_533
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_7
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_533: # %cond.load21
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_534
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_8
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_534: # %cond.load25
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_535
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_9
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_535: # %cond.load29
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_536
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_10
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_536: # %cond.load33
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_537
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_11
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_537: # %cond.load37
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_538
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_12
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_538: # %cond.load41
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_539
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_13
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_539: # %cond.load45
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_540
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_14
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_540: # %cond.load49
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_541
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_15
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_541: # %cond.load53
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_542
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_16
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_542: # %cond.load57
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_543
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_17
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_543: # %cond.load61
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_544
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_18
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_544: # %cond.load65
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_545
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_19
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_545: # %cond.load69
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_546
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_20
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_546: # %cond.load73
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_547
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_21
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_547: # %cond.load77
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_548
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_22
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_548: # %cond.load81
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_549
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_23
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_549: # %cond.load85
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_550
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_24
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_550: # %cond.load89
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_551
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_25
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_551: # %cond.load93
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_552
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_26
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_552: # %cond.load97
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_553
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_27
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_553: # %cond.load101
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_554
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_28
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_554: # %cond.load105
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_555
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_29
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_555: # %cond.load109
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_556
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_30
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_556: # %cond.load113
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_557
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_31
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_557: # %cond.load117
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 30
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_558
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_32
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_558: # %cond.load121
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_559
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_33
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_559: # %cond.load125
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 32
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_560
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_34
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_560: # %cond.load129
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 33
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_561
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_35
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_561: # %cond.load133
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 34
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_562
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_36
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_562: # %cond.load137
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 35
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_563
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_37
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_563: # %cond.load141
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 36
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_564
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_38
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_564: # %cond.load145
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 37
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_565
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_39
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_565: # %cond.load149
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 38
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_566
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_40
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_566: # %cond.load153
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 39
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_567
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_41
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_567: # %cond.load157
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 40
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_568
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_42
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_568: # %cond.load161
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 41
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_569
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_43
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_569: # %cond.load165
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 42
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_570
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_44
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_570: # %cond.load169
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 43
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_571
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_45
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_571: # %cond.load173
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 44
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_572
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_46
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_572: # %cond.load177
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 45
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_573
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_47
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_573: # %cond.load181
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 46
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_574
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_48
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_574: # %cond.load185
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 47
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_575
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_49
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_575: # %cond.load189
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 48
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_576
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_50
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_576: # %cond.load193
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 49
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_577
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_51
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_577: # %cond.load197
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 50
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_578
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_52
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_578: # %cond.load201
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 51
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_579
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_53
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_579: # %cond.load205
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 53
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 52
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_580
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_54
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_580: # %cond.load209
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 54
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 53
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_581
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_55
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_581: # %cond.load213
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 55
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 54
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_582
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_56
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_582: # %cond.load217
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 56
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 55
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_583
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_57
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_583: # %cond.load221
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 57
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 56
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_584
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_58
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_584: # %cond.load225
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 58
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 57
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_585
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_59
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_585: # %cond.load229
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 59
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 58
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_586
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_60
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_586: # %cond.load233
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 60
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 59
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_587
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_61
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_587: # %cond.load237
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 61
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 60
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1025
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_62
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1025: # %cond.load237
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_63
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_588: # %cond.load249
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 63
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_589
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_67
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_589: # %cond.load253
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 65
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 64
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_590
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_68
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_590: # %cond.load257
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 66
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 65
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_591
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_69
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_591: # %cond.load261
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 67
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 66
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_592
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_70
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_592: # %cond.load265
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 68
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 67
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_593
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_71
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_593: # %cond.load269
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 69
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 68
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_594
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_72
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_594: # %cond.load273
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 70
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 69
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_595
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_73
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_595: # %cond.load277
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 71
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 70
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_596
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_74
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_596: # %cond.load281
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 72
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 71
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_597
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_75
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_597: # %cond.load285
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 73
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 72
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_598
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_76
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_598: # %cond.load289
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 74
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 73
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_599
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_77
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_599: # %cond.load293
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 75
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 74
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_600
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_78
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_600: # %cond.load297
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 76
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 75
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_601
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_79
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_601: # %cond.load301
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 77
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 76
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_602
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_80
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_602: # %cond.load305
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 78
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 77
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_603
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_81
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_603: # %cond.load309
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 79
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 78
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_604
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_82
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_604: # %cond.load313
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 80
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 79
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_605
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_83
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_605: # %cond.load317
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 81
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 80
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_606
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_84
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_606: # %cond.load321
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 82
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 81
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_607
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_85
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_607: # %cond.load325
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 83
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 82
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_608
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_86
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_608: # %cond.load329
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 84
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 83
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_609
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_87
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_609: # %cond.load333
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 85
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 84
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_610
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_88
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_610: # %cond.load337
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 86
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 85
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_611
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_89
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_611: # %cond.load341
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 87
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 86
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_612
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_90
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_612: # %cond.load345
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 88
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 87
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_613
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_91
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_613: # %cond.load349
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 89
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 88
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_614
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_92
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_614: # %cond.load353
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 90
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 89
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_615
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_93
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_615: # %cond.load357
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 91
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 90
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_616
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_94
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_616: # %cond.load361
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 92
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 91
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_617
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_95
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_617: # %cond.load365
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 93
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 92
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_618
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_96
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_618: # %cond.load369
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 94
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 93
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_619
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_97
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_619: # %cond.load373
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 95
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 94
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_620
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_98
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_620: # %cond.load377
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 96
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 95
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_621
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_99
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_621: # %cond.load381
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 97
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 96
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_622
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_100
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_622: # %cond.load385
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 98
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 97
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_623
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_101
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_623: # %cond.load389
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 99
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 98
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_624
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_102
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_624: # %cond.load393
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 100
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 99
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_625
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_103
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_625: # %cond.load397
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 101
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 100
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_626
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_104
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_626: # %cond.load401
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 102
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 101
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_627
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_105
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_627: # %cond.load405
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 103
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 102
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_628
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_106
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_628: # %cond.load409
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 104
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 103
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_629
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_107
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_629: # %cond.load413
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 105
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 104
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_630
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_108
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_630: # %cond.load417
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 106
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 105
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_631
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_109
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_631: # %cond.load421
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 107
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 106
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_632
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_110
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_632: # %cond.load425
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 108
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 107
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_633
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_111
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_633: # %cond.load429
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 109
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 108
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_634
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_112
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_634: # %cond.load433
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 110
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 109
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_635
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_113
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_635: # %cond.load437
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 111
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 110
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_636
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_114
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_636: # %cond.load441
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 112
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 111
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_637
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_115
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_637: # %cond.load445
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 113
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 112
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_638
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_116
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_638: # %cond.load449
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 114
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 113
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_639
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_117
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_639: # %cond.load453
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 115
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 114
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_640
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_118
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_640: # %cond.load457
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 116
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 115
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_641
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_119
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_641: # %cond.load461
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 117
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 116
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_642
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_120
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_642: # %cond.load465
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 118
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 117
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_643
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_121
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_643: # %cond.load469
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 119
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 118
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_644
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_122
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_644: # %cond.load473
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 120
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 119
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_645
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_123
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_645: # %cond.load477
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 121
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 120
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_646
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_124
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_646: # %cond.load481
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 122
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 121
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_647
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_125
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_647: # %cond.load485
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 123
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 122
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_648
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_126
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_648: # %cond.load489
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 124
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 123
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_649
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_127
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_649: # %cond.load493
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 125
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 124
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1026
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_128
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1026: # %cond.load493
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_129
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_650: # %cond.load505
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 127
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_651
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_133
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_651: # %cond.load509
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 129
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 128
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_652
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_134
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_652: # %cond.load513
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 130
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 129
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_653
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_135
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_653: # %cond.load517
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 131
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 130
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_654
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_136
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_654: # %cond.load521
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 132
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 131
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_655
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_137
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_655: # %cond.load525
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 133
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 132
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_656
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_138
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_656: # %cond.load529
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 134
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 133
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_657
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_139
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_657: # %cond.load533
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 135
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 134
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_658
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_140
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_658: # %cond.load537
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 136
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 135
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_659
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_141
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_659: # %cond.load541
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 137
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 136
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_660
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_142
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_660: # %cond.load545
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 138
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 137
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_661
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_143
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_661: # %cond.load549
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 139
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 138
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_662
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_144
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_662: # %cond.load553
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 140
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 139
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_663
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_145
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_663: # %cond.load557
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 141
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 140
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_664
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_146
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_664: # %cond.load561
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 142
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 141
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_665
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_147
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_665: # %cond.load565
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 143
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 142
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_666
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_148
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_666: # %cond.load569
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 144
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 143
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_667
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_149
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_667: # %cond.load573
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 145
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 144
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_668
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_150
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_668: # %cond.load577
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 146
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 145
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_669
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_151
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_669: # %cond.load581
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 147
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 146
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_670
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_152
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_670: # %cond.load585
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 148
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 147
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_671
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_153
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_671: # %cond.load589
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 149
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 148
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_672
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_154
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_672: # %cond.load593
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 150
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 149
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_673
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_155
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_673: # %cond.load597
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 151
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 150
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_674
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_156
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_674: # %cond.load601
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 152
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 151
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_675
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_157
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_675: # %cond.load605
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 153
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 152
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_676
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_158
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_676: # %cond.load609
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 154
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 153
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_677
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_159
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_677: # %cond.load613
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 155
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 154
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_678
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_160
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_678: # %cond.load617
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 156
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 155
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_679
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_161
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_679: # %cond.load621
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 157
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 156
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_680
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_162
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_680: # %cond.load625
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 158
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 157
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_681
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_163
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_681: # %cond.load629
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 159
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 158
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_682
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_164
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_682: # %cond.load633
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 160
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 159
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_683
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_165
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_683: # %cond.load637
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 161
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 160
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_684
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_166
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_684: # %cond.load641
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 162
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 161
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_685
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_167
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_685: # %cond.load645
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 163
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 162
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_686
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_168
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_686: # %cond.load649
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 164
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 163
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_687
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_169
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_687: # %cond.load653
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 165
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 164
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_688
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_170
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_688: # %cond.load657
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 166
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 165
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_689
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_171
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_689: # %cond.load661
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 167
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 166
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_690
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_172
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_690: # %cond.load665
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 168
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 167
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_691
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_173
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_691: # %cond.load669
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 169
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 168
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_692
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_174
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_692: # %cond.load673
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 170
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 169
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_693
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_175
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_693: # %cond.load677
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 171
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 170
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_694
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_176
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_694: # %cond.load681
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 172
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 171
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_695
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_177
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_695: # %cond.load685
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 173
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 172
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_696
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_178
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_696: # %cond.load689
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 174
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 173
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_697
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_179
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_697: # %cond.load693
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 175
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 174
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_698
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_180
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_698: # %cond.load697
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 176
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 175
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_699
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_181
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_699: # %cond.load701
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 177
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 176
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_700
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_182
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_700: # %cond.load705
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 178
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 177
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_701
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_183
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_701: # %cond.load709
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 179
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 178
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_702
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_184
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_702: # %cond.load713
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 180
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 179
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_703
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_185
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_703: # %cond.load717
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 181
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 180
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_704
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_186
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_704: # %cond.load721
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 182
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 181
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_705
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_187
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_705: # %cond.load725
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 183
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 182
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_706
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_188
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_706: # %cond.load729
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 184
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 183
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_707
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_189
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_707: # %cond.load733
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 185
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 184
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_708
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_190
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_708: # %cond.load737
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 186
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 185
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_709
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_191
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_709: # %cond.load741
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 187
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 186
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_710
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_192
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_710: # %cond.load745
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 188
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 187
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_711
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_193
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_711: # %cond.load749
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 189
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 188
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1027
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_194
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1027: # %cond.load749
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_195
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_712: # %cond.load761
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 192
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 191
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_713
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_199
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_713: # %cond.load765
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 193
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 192
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_714
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_200
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_714: # %cond.load769
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 194
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 193
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_715
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_201
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_715: # %cond.load773
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 195
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 194
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_716
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_202
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_716: # %cond.load777
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 196
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 195
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_717
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_203
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_717: # %cond.load781
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 197
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 196
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_718
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_204
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_718: # %cond.load785
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 198
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 197
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_719
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_205
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_719: # %cond.load789
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 199
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 198
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_720
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_206
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_720: # %cond.load793
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 200
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 199
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_721
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_207
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_721: # %cond.load797
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 201
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 200
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_722
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_208
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_722: # %cond.load801
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 202
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 201
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_723
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_209
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_723: # %cond.load805
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 203
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 202
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_724
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_210
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_724: # %cond.load809
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 204
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 203
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_725
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_211
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_725: # %cond.load813
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 205
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 204
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_726
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_212
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_726: # %cond.load817
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 206
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 205
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_727
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_213
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_727: # %cond.load821
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 207
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 206
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_728
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_214
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_728: # %cond.load825
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 208
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 207
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_729
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_215
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_729: # %cond.load829
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 209
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 208
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_730
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_216
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_730: # %cond.load833
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 210
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 209
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_731
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_217
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_731: # %cond.load837
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 211
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 210
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_732
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_218
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_732: # %cond.load841
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 212
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 211
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_733
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_219
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_733: # %cond.load845
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 213
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 212
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_734
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_220
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_734: # %cond.load849
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 214
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 213
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_735
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_221
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_735: # %cond.load853
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 215
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 214
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_736
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_222
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_736: # %cond.load857
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 216
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 215
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_737
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_223
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_737: # %cond.load861
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 217
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 216
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_738
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_224
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_738: # %cond.load865
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 218
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 217
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_739
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_225
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_739: # %cond.load869
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 219
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 218
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_740
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_226
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_740: # %cond.load873
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 220
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 219
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_741
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_227
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_741: # %cond.load877
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 221
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 220
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_742
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_228
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_742: # %cond.load881
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 222
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 221
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_743
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_229
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_743: # %cond.load885
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 223
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 222
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_744
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_230
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_744: # %cond.load889
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 224
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 223
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_745
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_231
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_745: # %cond.load893
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 225
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 224
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_746
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_232
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_746: # %cond.load897
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 226
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 225
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_747
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_233
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_747: # %cond.load901
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 227
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 226
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_748
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_234
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_748: # %cond.load905
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 228
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 227
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_749
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_235
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_749: # %cond.load909
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 229
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 228
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_750
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_236
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_750: # %cond.load913
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 230
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 229
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_751
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_237
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_751: # %cond.load917
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 231
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 230
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_752
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_238
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_752: # %cond.load921
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 232
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 231
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_753
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_239
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_753: # %cond.load925
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 233
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 232
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_754
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_240
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_754: # %cond.load929
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 234
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 233
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_755
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_241
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_755: # %cond.load933
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 235
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 234
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_756
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_242
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_756: # %cond.load937
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 236
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 235
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_757
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_243
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_757: # %cond.load941
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 237
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 236
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_758
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_244
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_758: # %cond.load945
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 238
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 237
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_759
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_245
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_759: # %cond.load949
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 239
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 238
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_760
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_246
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_760: # %cond.load953
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 240
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 239
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_761
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_247
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_761: # %cond.load957
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 241
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 240
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_762
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_248
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_762: # %cond.load961
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 242
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 241
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_763
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_249
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_763: # %cond.load965
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 243
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 242
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_764
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_250
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_764: # %cond.load969
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 244
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 243
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_765
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_251
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_765: # %cond.load973
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 245
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 244
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_766
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_252
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_766: # %cond.load977
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 246
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 245
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_767
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_253
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_767: # %cond.load981
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 247
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 246
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_768
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_254
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_768: # %cond.load985
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 248
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 247
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_769
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_255
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_769: # %cond.load989
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 249
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 248
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_770
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_256
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_770: # %cond.load993
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 250
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 249
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_771
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_257
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_771: # %cond.load997
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 251
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 250
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_772
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_258
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_772: # %cond.load1001
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 252
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 251
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_773
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_259
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_773: # %cond.load1005
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 253
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 252
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1028
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_260
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1028: # %cond.load1005
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_261
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_774: # %cond.load1017
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 255
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_775
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_265
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_775: # %cond.load1021
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 257
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 256
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_776
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_266
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_776: # %cond.load1025
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 258
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 257
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_777
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_267
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_777: # %cond.load1029
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 259
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 258
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_778
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_268
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_778: # %cond.load1033
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 260
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 259
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_779
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_269
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_779: # %cond.load1037
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 261
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 260
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_780
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_270
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_780: # %cond.load1041
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 262
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 261
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_781
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_271
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_781: # %cond.load1045
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 263
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 262
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_782
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_272
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_782: # %cond.load1049
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 264
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 263
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_783
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_273
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_783: # %cond.load1053
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 265
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 264
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_784
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_274
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_784: # %cond.load1057
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 266
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 265
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_785
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_275
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_785: # %cond.load1061
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 267
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 266
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_786
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_276
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_786: # %cond.load1065
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 268
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 267
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_787
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_277
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_787: # %cond.load1069
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 269
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 268
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_788
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_278
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_788: # %cond.load1073
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 270
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 269
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_789
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_279
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_789: # %cond.load1077
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 271
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 270
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_790
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_280
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_790: # %cond.load1081
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 272
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 271
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_791
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_281
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_791: # %cond.load1085
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 273
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 272
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_792
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_282
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_792: # %cond.load1089
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 274
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 273
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_793
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_283
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_793: # %cond.load1093
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 275
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 274
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_794
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_284
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_794: # %cond.load1097
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 276
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 275
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_795
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_285
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_795: # %cond.load1101
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 277
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 276
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_796
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_286
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_796: # %cond.load1105
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 278
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 277
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_797
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_287
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_797: # %cond.load1109
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 279
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 278
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_798
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_288
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_798: # %cond.load1113
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 280
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 279
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_799
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_289
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_799: # %cond.load1117
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 281
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 280
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_800
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_290
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_800: # %cond.load1121
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 282
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 281
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_801
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_291
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_801: # %cond.load1125
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 283
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 282
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_802
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_292
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_802: # %cond.load1129
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 284
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 283
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_803
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_293
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_803: # %cond.load1133
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 285
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 284
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_804
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_294
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_804: # %cond.load1137
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 286
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 285
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_805
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_295
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_805: # %cond.load1141
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 287
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 286
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_806
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_296
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_806: # %cond.load1145
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 288
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 287
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_807
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_297
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_807: # %cond.load1149
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 289
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 288
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_808
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_298
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_808: # %cond.load1153
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 290
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 289
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_809
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_299
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_809: # %cond.load1157
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 291
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 290
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_810
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_300
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_810: # %cond.load1161
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 292
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 291
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_811
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_301
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_811: # %cond.load1165
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 293
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 292
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_812
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_302
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_812: # %cond.load1169
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 294
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 293
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_813
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_303
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_813: # %cond.load1173
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 295
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 294
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_814
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_304
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_814: # %cond.load1177
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 296
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 295
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_815
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_305
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_815: # %cond.load1181
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 297
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 296
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_816
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_306
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_816: # %cond.load1185
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 298
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 297
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_817
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_307
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_817: # %cond.load1189
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 299
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 298
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_818
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_308
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_818: # %cond.load1193
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 300
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 299
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_819
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_309
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_819: # %cond.load1197
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 301
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 300
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_820
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_310
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_820: # %cond.load1201
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 302
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 301
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_821
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_311
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_821: # %cond.load1205
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 303
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 302
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_822
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_312
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_822: # %cond.load1209
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 304
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 303
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_823
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_313
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_823: # %cond.load1213
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 305
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 304
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_824
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_314
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_824: # %cond.load1217
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 306
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 305
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_825
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_315
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_825: # %cond.load1221
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 307
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 306
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_826
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_316
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_826: # %cond.load1225
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 308
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 307
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_827
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_317
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_827: # %cond.load1229
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 309
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 308
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_828
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_318
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_828: # %cond.load1233
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 310
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 309
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_829
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_319
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_829: # %cond.load1237
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 311
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 310
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_830
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_320
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_830: # %cond.load1241
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 312
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 311
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_831
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_321
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_831: # %cond.load1245
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 313
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 312
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_832
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_322
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_832: # %cond.load1249
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 314
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 313
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_833
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_323
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_833: # %cond.load1253
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 315
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 314
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_834
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_324
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_834: # %cond.load1257
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 316
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 315
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_835
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_325
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_835: # %cond.load1261
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 317
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 316
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1029
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_326
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1029: # %cond.load1261
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_327
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_836: # %cond.load1273
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 320
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 319
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_837
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_331
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_837: # %cond.load1277
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 321
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 320
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_838
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_332
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_838: # %cond.load1281
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 322
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 321
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_839
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_333
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_839: # %cond.load1285
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 323
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 322
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_840
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_334
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_840: # %cond.load1289
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 324
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 323
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_841
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_335
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_841: # %cond.load1293
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 325
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 324
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_842
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_336
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_842: # %cond.load1297
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 326
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 325
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_843
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_337
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_843: # %cond.load1301
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 327
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 326
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_844
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_338
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_844: # %cond.load1305
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 328
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 327
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_845
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_339
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_845: # %cond.load1309
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 329
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 328
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_846
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_340
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_846: # %cond.load1313
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 330
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 329
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_847
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_341
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_847: # %cond.load1317
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 331
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 330
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_848
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_342
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_848: # %cond.load1321
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 332
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 331
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_849
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_343
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_849: # %cond.load1325
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 333
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 332
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_850
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_344
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_850: # %cond.load1329
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 334
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 333
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_851
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_345
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_851: # %cond.load1333
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 335
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 334
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_852
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_346
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_852: # %cond.load1337
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 336
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 335
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_853
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_347
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_853: # %cond.load1341
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 337
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 336
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_854
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_348
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_854: # %cond.load1345
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 338
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 337
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_855
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_349
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_855: # %cond.load1349
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 339
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 338
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_856
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_350
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_856: # %cond.load1353
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 340
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 339
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_857
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_351
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_857: # %cond.load1357
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 341
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 340
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_858
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_352
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_858: # %cond.load1361
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 342
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 341
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_859
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_353
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_859: # %cond.load1365
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 343
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 342
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_860
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_354
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_860: # %cond.load1369
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 344
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 343
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_861
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_355
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_861: # %cond.load1373
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 345
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 344
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_862
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_356
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_862: # %cond.load1377
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 346
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 345
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_863
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_357
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_863: # %cond.load1381
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 347
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 346
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_864
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_358
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_864: # %cond.load1385
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 348
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 347
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_865
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_359
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_865: # %cond.load1389
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 349
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 348
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_866
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_360
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_866: # %cond.load1393
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 350
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 349
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_867
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_361
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_867: # %cond.load1397
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 351
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 350
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_868
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_362
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_868: # %cond.load1401
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 352
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 351
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_869
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_363
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_869: # %cond.load1405
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 353
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 352
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_870
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_364
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_870: # %cond.load1409
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 354
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 353
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_871
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_365
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_871: # %cond.load1413
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 355
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 354
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_872
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_366
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_872: # %cond.load1417
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 356
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 355
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_873
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_367
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_873: # %cond.load1421
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 357
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 356
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_874
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_368
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_874: # %cond.load1425
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 358
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 357
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_875
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_369
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_875: # %cond.load1429
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 359
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 358
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_876
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_370
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_876: # %cond.load1433
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 360
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 359
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_877
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_371
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_877: # %cond.load1437
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 361
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 360
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_878
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_372
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_878: # %cond.load1441
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 362
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 361
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_879
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_373
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_879: # %cond.load1445
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 363
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 362
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_880
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_374
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_880: # %cond.load1449
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 364
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 363
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_881
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_375
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_881: # %cond.load1453
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 365
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 364
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_882
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_376
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_882: # %cond.load1457
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 366
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 365
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_883
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_377
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_883: # %cond.load1461
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 367
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 366
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_884
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_378
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_884: # %cond.load1465
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 368
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 367
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_885
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_379
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_885: # %cond.load1469
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 369
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 368
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_886
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_380
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_886: # %cond.load1473
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 370
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 369
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_887
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_381
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_887: # %cond.load1477
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 371
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 370
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_888
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_382
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_888: # %cond.load1481
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 372
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 371
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_889
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_383
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_889: # %cond.load1485
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 373
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 372
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_890
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_384
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_890: # %cond.load1489
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 374
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 373
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_891
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_385
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_891: # %cond.load1493
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 375
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 374
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_892
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_386
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_892: # %cond.load1497
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 376
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 375
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_893
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_387
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_893: # %cond.load1501
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 377
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 376
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_894
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_388
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_894: # %cond.load1505
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 378
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 377
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_895
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_389
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_895: # %cond.load1509
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 379
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 378
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_896
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_390
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_896: # %cond.load1513
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 380
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 379
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_897
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_391
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_897: # %cond.load1517
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 381
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 380
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1030
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_392
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1030: # %cond.load1517
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_393
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_898: # %cond.load1529
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 384
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 383
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_899
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_397
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_899: # %cond.load1533
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 385
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 384
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_900
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_398
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_900: # %cond.load1537
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 386
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 385
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_901
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_399
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_901: # %cond.load1541
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 387
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 386
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_902
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_400
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_902: # %cond.load1545
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 388
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 387
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_903
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_401
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_903: # %cond.load1549
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 389
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 388
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_904
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_402
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_904: # %cond.load1553
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 390
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 389
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_905
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_403
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_905: # %cond.load1557
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 391
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 390
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_906
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_404
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_906: # %cond.load1561
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 392
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 391
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_907
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_405
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_907: # %cond.load1565
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 393
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 392
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_908
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_406
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_908: # %cond.load1569
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 394
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 393
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_909
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_407
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_909: # %cond.load1573
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 395
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 394
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_910
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_408
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_910: # %cond.load1577
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 396
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 395
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_911
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_409
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_911: # %cond.load1581
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 397
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 396
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_912
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_410
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_912: # %cond.load1585
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 398
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 397
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_913
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_411
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_913: # %cond.load1589
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 399
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 398
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_914
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_412
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_914: # %cond.load1593
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 400
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 399
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_915
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_413
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_915: # %cond.load1597
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 401
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 400
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_916
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_414
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_916: # %cond.load1601
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 402
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 401
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_917
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_415
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_917: # %cond.load1605
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 403
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 402
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_918
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_416
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_918: # %cond.load1609
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 404
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 403
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_919
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_417
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_919: # %cond.load1613
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 405
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 404
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_920
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_418
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_920: # %cond.load1617
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 406
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 405
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_921
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_419
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_921: # %cond.load1621
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 407
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 406
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_922
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_420
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_922: # %cond.load1625
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 408
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 407
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_923
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_421
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_923: # %cond.load1629
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 409
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 408
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_924
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_422
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_924: # %cond.load1633
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 410
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 409
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_925
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_423
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_925: # %cond.load1637
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 411
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 410
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_926
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_424
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_926: # %cond.load1641
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 412
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 411
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_927
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_425
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_927: # %cond.load1645
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 413
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 412
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_928
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_426
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_928: # %cond.load1649
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 414
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 413
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_929
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_427
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_929: # %cond.load1653
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 415
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 414
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_930
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_428
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_930: # %cond.load1657
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 416
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 415
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_931
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_429
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_931: # %cond.load1661
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 417
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 416
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_932
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_430
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_932: # %cond.load1665
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 418
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 417
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_933
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_431
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_933: # %cond.load1669
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 419
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 418
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_934
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_432
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_934: # %cond.load1673
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 420
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 419
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_935
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_433
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_935: # %cond.load1677
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 421
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 420
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_936
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_434
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_936: # %cond.load1681
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 422
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 421
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_937
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_435
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_937: # %cond.load1685
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 423
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 422
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_938
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_436
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_938: # %cond.load1689
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 424
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 423
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_939
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_437
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_939: # %cond.load1693
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 425
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 424
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_940
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_438
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_940: # %cond.load1697
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 426
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 425
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_941
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_439
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_941: # %cond.load1701
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 427
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 426
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_942
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_440
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_942: # %cond.load1705
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 428
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 427
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_943
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_441
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_943: # %cond.load1709
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 429
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 428
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_944
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_442
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_944: # %cond.load1713
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 430
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 429
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_945
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_443
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_945: # %cond.load1717
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 431
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 430
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_946
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_444
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_946: # %cond.load1721
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 432
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 431
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_947
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_445
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_947: # %cond.load1725
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 433
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 432
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_948
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_446
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_948: # %cond.load1729
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 434
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 433
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_949
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_447
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_949: # %cond.load1733
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 435
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 434
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_950
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_448
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_950: # %cond.load1737
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 436
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 435
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_951
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_449
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_951: # %cond.load1741
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 437
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 436
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_952
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_450
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_952: # %cond.load1745
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 438
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 437
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_953
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_451
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_953: # %cond.load1749
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 439
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 438
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_954
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_452
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_954: # %cond.load1753
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 440
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 439
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_955
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_453
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_955: # %cond.load1757
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 441
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 440
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_956
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_454
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_956: # %cond.load1761
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 442
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 441
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_957
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_455
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_957: # %cond.load1765
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 443
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 442
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_958
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_456
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_958: # %cond.load1769
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 444
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 443
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_959
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_457
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_959: # %cond.load1773
+; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 445
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 444
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
+; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1031
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_458
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1031: # %cond.load1773
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_459
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_960: # %cond.load1785
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 448
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 447
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_961
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_463
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_961: # %cond.load1789
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 449
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 448
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_962
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_464
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_962: # %cond.load1793
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 450
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 449
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_963
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_465
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_963: # %cond.load1797
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 451
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 450
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_964
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_466
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_964: # %cond.load1801
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 452
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 451
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_965
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_467
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_965: # %cond.load1805
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 453
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 452
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_966
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_468
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_966: # %cond.load1809
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 454
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 453
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_967
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_469
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_967: # %cond.load1813
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 455
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 454
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_968
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_470
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_968: # %cond.load1817
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 456
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 455
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_969
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_471
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_969: # %cond.load1821
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 457
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 456
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_970
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_472
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_970: # %cond.load1825
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 458
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 457
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_971
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_473
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_971: # %cond.load1829
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 459
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 458
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_972
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_474
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_972: # %cond.load1833
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 460
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 459
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_973
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_475
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_973: # %cond.load1837
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 461
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 460
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_974
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_476
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_974: # %cond.load1841
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 462
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 461
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_975
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_477
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_975: # %cond.load1845
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 463
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 462
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_976
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_478
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_976: # %cond.load1849
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 464
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 463
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_977
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_479
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_977: # %cond.load1853
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 465
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 464
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_978
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_480
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_978: # %cond.load1857
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 466
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 465
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_979
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_481
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_979: # %cond.load1861
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 467
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 466
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_980
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_482
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_980: # %cond.load1865
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 468
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 467
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_981
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_483
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_981: # %cond.load1869
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 469
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 468
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_982
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_484
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_982: # %cond.load1873
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 470
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 469
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_983
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_485
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_983: # %cond.load1877
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 471
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 470
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_984
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_486
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_984: # %cond.load1881
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 472
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 471
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_985
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_487
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_985: # %cond.load1885
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 473
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 472
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_986
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_488
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_986: # %cond.load1889
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 474
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 473
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_987
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_489
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_987: # %cond.load1893
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 475
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 474
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_988
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_490
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_988: # %cond.load1897
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 476
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 475
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_989
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_491
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_989: # %cond.load1901
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 477
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 476
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_990
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_492
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_990: # %cond.load1905
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 478
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 477
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_991
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_493
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_991: # %cond.load1909
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 479
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 478
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_992
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_494
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 480
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 479
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_993
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_495
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 481
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 480
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_994
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_496
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 482
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 481
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_995
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_497
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 483
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 482
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_996
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_498
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 484
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 483
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_997
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_499
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 485
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 484
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_998
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_500
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 486
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 485
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_999
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_501
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 487
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 486
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1000
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_502
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 488
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 487
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1001
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_503
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 489
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 488
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1002
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_504
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 490
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 489
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1003
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_505
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 491
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 490
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1004
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_506
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 492
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 491
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1005
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_507
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 493
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 492
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1006
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_508
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 494
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 493
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1007
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_509
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 495
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 494
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1008
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_510
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 496
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 495
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1009
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_511
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 497
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 496
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1010
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_512
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 498
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 497
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1011
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_513
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 499
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 498
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1012
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_514
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 500
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 499
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1013
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_515
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 501
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 500
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1014
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_516
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 502
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 501
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1015
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_517
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 503
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 502
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1016
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_518
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 504
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 503
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1017
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_519
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 505
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 504
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1018
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_520
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 506
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 505
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1019
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_521
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 507
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 506
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1020
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_522
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 508
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 507
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1021
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_523
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 509
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 508
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1022
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_524
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 510
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 509
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
+; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1023
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_525
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-VRGATHER-RV64-NEXT:    li a2, 511
+; CHECK-VRGATHER-RV64-NEXT:    li a3, 510
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
+; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_1024
+; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_526
+; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-VRGATHER-RV64-NEXT:    lbu a0, 0(a0)
+; CHECK-VRGATHER-RV64-NEXT:    li a1, 512
+; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a0
+; CHECK-VRGATHER-RV64-NEXT:    li a0, 511
+; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-VRGATHER-RV64-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_1
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_544
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1: # %else
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_2
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_545
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_2: # %else2
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_3
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_546
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_3: # %else6
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_4
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_547
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_4: # %else10
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_5
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_548
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_5: # %else14
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_6
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_549
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_6: # %else18
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_7
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_550
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_7: # %else22
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_8
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_551
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_8: # %else26
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_9
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_552
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_9: # %else30
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_10
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_553
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_10: # %else34
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_11
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_554
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_11: # %else38
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_12
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_555
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_12: # %else42
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_13
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_556
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_13: # %else46
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_14
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_557
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_14: # %else50
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_15
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_558
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_15: # %else54
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_16
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_559
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_16: # %else58
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_17
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_560
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_17: # %else62
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_18
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_561
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_18: # %else66
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_19
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_562
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_19: # %else70
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_20
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_563
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_20: # %else74
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_21
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_564
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_21: # %else78
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_22
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_565
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_22: # %else82
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_23
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_566
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_23: # %else86
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_24
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_567
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_24: # %else90
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_25
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_568
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_25: # %else94
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_26
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_569
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_26: # %else98
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_27
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_570
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_27: # %else102
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_28
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_571
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_28: # %else106
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_30
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_29: # %cond.load109
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_30: # %else110
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    li a1, 32
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_32
+; CHECK-INDEXED-RV32-NEXT:  # %bb.31: # %cond.load113
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_32: # %else114
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v0, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_34
+; CHECK-INDEXED-RV32-NEXT:  # %bb.33: # %cond.load117
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v17, 30
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_34: # %else118
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_35
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_572
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_35: # %else122
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_36
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_573
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_36: # %else126
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_37
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_574
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_37: # %else130
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_38
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_575
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_38: # %else134
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_39
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_576
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_39: # %else138
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_40
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_577
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_40: # %else142
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_41
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_578
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_41: # %else146
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_42
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_579
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_42: # %else150
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_43
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_580
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_43: # %else154
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_44
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_581
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_44: # %else158
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_45
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_582
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_45: # %else162
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_46
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_583
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_46: # %else166
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_47
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_584
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_47: # %else170
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_48
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_585
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_48: # %else174
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_49
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_586
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_49: # %else178
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_50
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_587
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_50: # %else182
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_51
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_588
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_51: # %else186
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_52
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_589
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_52: # %else190
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_53
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_590
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_53: # %else194
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_54
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_591
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_54: # %else198
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_55
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_592
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_55: # %else202
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_56
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_593
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_56: # %else206
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_57
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_594
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_57: # %else210
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_58
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_595
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_58: # %else214
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_59
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_596
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_59: # %else218
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_60
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_597
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_60: # %else222
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_61
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_598
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_61: # %else226
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_62
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_599
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_62: # %else230
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_63
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_600
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_63: # %else234
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_64
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_601
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_64: # %else238
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_66
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_65: # %cond.load241
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 62
+; CHECK-INDEXED-RV32-NEXT:    li a4, 61
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_66: # %else242
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_68
+; CHECK-INDEXED-RV32-NEXT:  # %bb.67: # %cond.load245
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 63
+; CHECK-INDEXED-RV32-NEXT:    li a4, 62
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_68: # %else246
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_69
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_602
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_69: # %else250
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_70
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_603
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_70: # %else254
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_71
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_604
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_71: # %else258
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_72
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_605
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_72: # %else262
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_73
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_606
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_73: # %else266
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_74
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_607
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_74: # %else270
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_75
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_608
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_75: # %else274
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_76
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_609
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_76: # %else278
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_77
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_610
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_77: # %else282
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_78
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_611
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_78: # %else286
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_79
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_612
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_79: # %else290
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_80
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_613
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_80: # %else294
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_81
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_614
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_81: # %else298
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_82
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_615
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_82: # %else302
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_83
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_616
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_83: # %else306
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_84
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_617
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_84: # %else310
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_85
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_618
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_85: # %else314
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_86
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_619
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_86: # %else318
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_87
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_620
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_87: # %else322
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_88
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_621
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_88: # %else326
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_89
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_622
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_89: # %else330
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_90
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_623
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_90: # %else334
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_91
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_624
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_91: # %else338
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_92
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_625
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_92: # %else342
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_93
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_626
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_93: # %else346
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_94
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_627
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_94: # %else350
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_95
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_628
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_95: # %else354
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_96
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_629
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_96: # %else358
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_97
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_630
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_97: # %else362
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_98
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_631
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_98: # %else366
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_100
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_99: # %cond.load369
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 94
+; CHECK-INDEXED-RV32-NEXT:    li a4, 93
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_100: # %else370
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_102
+; CHECK-INDEXED-RV32-NEXT:  # %bb.101: # %cond.load373
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 95
+; CHECK-INDEXED-RV32-NEXT:    li a4, 94
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_102: # %else374
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_103
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_632
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_103: # %else378
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_104
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_633
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_104: # %else382
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_105
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_634
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_105: # %else386
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_106
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_635
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_106: # %else390
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_107
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_636
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_107: # %else394
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_108
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_637
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_108: # %else398
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_109
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_638
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_109: # %else402
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_110
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_639
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_110: # %else406
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_111
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_640
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_111: # %else410
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_112
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_641
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_112: # %else414
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_113
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_642
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_113: # %else418
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_114
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_643
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_114: # %else422
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_115
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_644
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_115: # %else426
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_116
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_645
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_116: # %else430
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_117
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_646
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_117: # %else434
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_118
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_647
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_118: # %else438
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_119
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_648
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_119: # %else442
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_120
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_649
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_120: # %else446
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_121
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_650
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_121: # %else450
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_122
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_651
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_122: # %else454
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_123
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_652
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_123: # %else458
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_124
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_653
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_124: # %else462
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_125
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_654
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_125: # %else466
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_126
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_655
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_126: # %else470
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_127
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_656
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_127: # %else474
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_128
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_657
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_128: # %else478
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_129
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_658
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_129: # %else482
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_130
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_659
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_130: # %else486
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_131
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_660
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_131: # %else490
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_132
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_661
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_132: # %else494
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_134
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_133: # %cond.load497
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 126
+; CHECK-INDEXED-RV32-NEXT:    li a4, 125
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_134: # %else498
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_136
+; CHECK-INDEXED-RV32-NEXT:  # %bb.135: # %cond.load501
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 127
+; CHECK-INDEXED-RV32-NEXT:    li a4, 126
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_136: # %else502
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_137
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_662
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_137: # %else506
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_138
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_663
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_138: # %else510
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_139
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_664
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_139: # %else514
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_140
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_665
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_140: # %else518
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_141
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_666
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_141: # %else522
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_142
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_667
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_142: # %else526
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_143
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_668
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_143: # %else530
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_144
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_669
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_144: # %else534
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_145
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_670
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_145: # %else538
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_146
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_671
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_146: # %else542
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_147
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_672
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_147: # %else546
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_148
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_673
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_148: # %else550
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_149
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_674
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_149: # %else554
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_150
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_675
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_150: # %else558
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_151
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_676
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_151: # %else562
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_152
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_677
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_152: # %else566
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_153
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_678
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_153: # %else570
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_154
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_679
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_154: # %else574
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_155
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_680
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_155: # %else578
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_156
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_681
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_156: # %else582
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_157
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_682
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_157: # %else586
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_158
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_683
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_158: # %else590
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_159
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_684
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_159: # %else594
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_160
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_685
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_160: # %else598
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_161
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_686
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_161: # %else602
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_162
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_687
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_162: # %else606
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_163
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_688
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_163: # %else610
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_164
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_689
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_164: # %else614
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_165
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_690
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_165: # %else618
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_166
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_691
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_166: # %else622
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_168
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_167: # %cond.load625
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 158
+; CHECK-INDEXED-RV32-NEXT:    li a4, 157
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_168: # %else626
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_170
+; CHECK-INDEXED-RV32-NEXT:  # %bb.169: # %cond.load629
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 159
+; CHECK-INDEXED-RV32-NEXT:    li a4, 158
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_170: # %else630
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_171
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_692
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_171: # %else634
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_172
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_693
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_172: # %else638
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_173
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_694
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_173: # %else642
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_174
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_695
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_174: # %else646
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_175
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_696
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_175: # %else650
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_176
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_697
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_176: # %else654
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_177
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_698
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_177: # %else658
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_178
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_699
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_178: # %else662
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_179
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_700
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_179: # %else666
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_180
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_701
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_180: # %else670
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_181
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_702
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_181: # %else674
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_182
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_703
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_182: # %else678
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_183
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_704
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_183: # %else682
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_184
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_705
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_184: # %else686
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_185
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_706
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_185: # %else690
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_186
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_707
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_186: # %else694
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_187
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_708
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_187: # %else698
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_188
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_709
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_188: # %else702
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_189
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_710
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_189: # %else706
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_190
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_711
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_190: # %else710
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_191
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_712
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_191: # %else714
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_192
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_713
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_192: # %else718
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_193
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_714
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_193: # %else722
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_194
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_715
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_194: # %else726
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_195
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_716
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_195: # %else730
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_196
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_717
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_196: # %else734
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_197
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_718
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_197: # %else738
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_198
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_719
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_198: # %else742
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_199
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_720
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_199: # %else746
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_200
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_721
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_200: # %else750
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_202
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_201: # %cond.load753
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 190
+; CHECK-INDEXED-RV32-NEXT:    li a4, 189
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_202: # %else754
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_204
+; CHECK-INDEXED-RV32-NEXT:  # %bb.203: # %cond.load757
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 191
+; CHECK-INDEXED-RV32-NEXT:    li a4, 190
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_204: # %else758
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_205
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_722
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_205: # %else762
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_206
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_723
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_206: # %else766
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_207
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_724
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_207: # %else770
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_208
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_725
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_208: # %else774
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_209
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_726
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_209: # %else778
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_210
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_727
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_210: # %else782
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_211
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_728
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_211: # %else786
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_212
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_729
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_212: # %else790
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_213
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_730
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_213: # %else794
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_214
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_731
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_214: # %else798
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_215
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_732
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_215: # %else802
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_216
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_733
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_216: # %else806
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_217
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_734
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_217: # %else810
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_218
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_735
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_218: # %else814
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_219
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_736
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_219: # %else818
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_220
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_737
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_220: # %else822
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_221
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_738
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_221: # %else826
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_222
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_739
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_222: # %else830
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_223
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_740
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_223: # %else834
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_224
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_741
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_224: # %else838
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_225
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_742
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_225: # %else842
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_226
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_743
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_226: # %else846
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_227
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_744
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_227: # %else850
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_228
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_745
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_228: # %else854
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_229
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_746
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_229: # %else858
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_230
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_747
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_230: # %else862
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_231
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_748
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_231: # %else866
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_232
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_749
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_232: # %else870
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_233
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_750
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_233: # %else874
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_234
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_751
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_234: # %else878
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_236
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_235: # %cond.load881
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 222
+; CHECK-INDEXED-RV32-NEXT:    li a4, 221
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_236: # %else882
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_238
+; CHECK-INDEXED-RV32-NEXT:  # %bb.237: # %cond.load885
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 223
+; CHECK-INDEXED-RV32-NEXT:    li a4, 222
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_238: # %else886
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_239
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_752
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_239: # %else890
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_240
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_753
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_240: # %else894
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_241
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_754
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_241: # %else898
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_242
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_755
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_242: # %else902
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_243
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_756
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_243: # %else906
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_244
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_757
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_244: # %else910
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_245
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_758
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_245: # %else914
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_246
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_759
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_246: # %else918
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_247
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_760
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_247: # %else922
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_248
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_761
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_248: # %else926
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_249
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_762
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_249: # %else930
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_250
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_763
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_250: # %else934
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_251
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_764
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_251: # %else938
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_252
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_765
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_252: # %else942
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_253
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_766
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_253: # %else946
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_254
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_767
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_254: # %else950
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_255
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_768
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_255: # %else954
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_256
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_769
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_256: # %else958
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_257
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_770
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_257: # %else962
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_258
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_771
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_258: # %else966
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_259
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_772
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_259: # %else970
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_260
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_773
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_260: # %else974
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_261
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_774
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_261: # %else978
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_262
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_775
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_262: # %else982
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_263
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_776
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_263: # %else986
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_264
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_777
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_264: # %else990
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_265
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_778
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_265: # %else994
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_266
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_779
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_266: # %else998
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_267
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_780
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_267: # %else1002
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_268
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_781
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_268: # %else1006
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_270
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_269: # %cond.load1009
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 254
+; CHECK-INDEXED-RV32-NEXT:    li a4, 253
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_270: # %else1010
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_272
+; CHECK-INDEXED-RV32-NEXT:  # %bb.271: # %cond.load1013
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 255
+; CHECK-INDEXED-RV32-NEXT:    li a4, 254
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_272: # %else1014
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_273
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_782
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_273: # %else1018
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_274
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_783
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_274: # %else1022
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_275
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_784
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_275: # %else1026
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_276
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_785
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_276: # %else1030
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_277
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_786
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_277: # %else1034
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_278
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_787
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_278: # %else1038
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_279
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_788
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_279: # %else1042
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_280
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_789
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_280: # %else1046
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_281
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_790
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_281: # %else1050
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_282
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_791
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_282: # %else1054
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_283
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_792
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_283: # %else1058
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_284
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_793
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_284: # %else1062
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_285
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_794
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_285: # %else1066
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_286
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_795
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_286: # %else1070
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_287
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_796
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_287: # %else1074
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_288
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_797
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_288: # %else1078
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_289
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_798
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_289: # %else1082
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_290
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_799
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_290: # %else1086
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_291
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_800
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_291: # %else1090
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_292
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_801
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_292: # %else1094
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_293
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_802
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_293: # %else1098
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_294
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_803
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_294: # %else1102
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_295
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_804
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_295: # %else1106
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_296
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_805
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_296: # %else1110
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_297
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_806
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_297: # %else1114
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_298
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_807
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_298: # %else1118
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_299
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_808
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_299: # %else1122
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_300
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_809
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_300: # %else1126
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_301
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_810
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_301: # %else1130
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_302
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_811
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_302: # %else1134
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_304
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_303: # %cond.load1137
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 286
+; CHECK-INDEXED-RV32-NEXT:    li a4, 285
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_304: # %else1138
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_306
+; CHECK-INDEXED-RV32-NEXT:  # %bb.305: # %cond.load1141
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 287
+; CHECK-INDEXED-RV32-NEXT:    li a4, 286
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_306: # %else1142
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_307
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_812
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_307: # %else1146
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_308
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_813
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_308: # %else1150
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_309
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_814
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_309: # %else1154
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_310
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_815
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_310: # %else1158
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_311
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_816
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_311: # %else1162
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_312
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_817
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_312: # %else1166
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_313
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_818
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_313: # %else1170
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_314
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_819
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_314: # %else1174
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_315
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_820
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_315: # %else1178
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_316
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_821
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_316: # %else1182
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_317
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_822
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_317: # %else1186
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_318
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_823
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_318: # %else1190
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_319
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_824
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_319: # %else1194
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_320
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_825
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_320: # %else1198
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_321
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_826
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_321: # %else1202
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_322
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_827
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_322: # %else1206
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_323
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_828
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_323: # %else1210
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_324
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_829
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_324: # %else1214
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_325
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_830
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_325: # %else1218
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_326
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_831
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_326: # %else1222
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_327
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_832
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_327: # %else1226
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_328
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_833
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_328: # %else1230
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_329
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_834
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_329: # %else1234
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_330
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_835
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_330: # %else1238
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_331
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_836
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_331: # %else1242
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_332
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_837
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_332: # %else1246
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_333
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_838
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_333: # %else1250
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_334
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_839
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_334: # %else1254
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_335
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_840
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_335: # %else1258
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_336
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_841
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_336: # %else1262
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_338
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_337: # %cond.load1265
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 318
+; CHECK-INDEXED-RV32-NEXT:    li a4, 317
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_338: # %else1266
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_340
+; CHECK-INDEXED-RV32-NEXT:  # %bb.339: # %cond.load1269
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 319
+; CHECK-INDEXED-RV32-NEXT:    li a4, 318
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_340: # %else1270
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_341
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_842
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_341: # %else1274
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_342
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_843
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_342: # %else1278
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_343
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_844
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_343: # %else1282
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_344
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_845
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_344: # %else1286
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_345
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_846
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_345: # %else1290
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_346
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_847
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_346: # %else1294
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_347
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_848
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_347: # %else1298
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_348
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_849
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_348: # %else1302
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_349
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_850
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_349: # %else1306
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_350
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_851
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_350: # %else1310
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_351
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_852
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_351: # %else1314
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_352
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_853
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_352: # %else1318
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_353
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_854
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_353: # %else1322
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_354
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_855
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_354: # %else1326
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_355
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_856
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_355: # %else1330
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_356
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_857
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_356: # %else1334
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_357
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_858
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_357: # %else1338
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_358
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_859
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_358: # %else1342
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_359
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_860
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_359: # %else1346
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_360
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_861
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_360: # %else1350
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_361
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_862
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_361: # %else1354
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_362
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_863
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_362: # %else1358
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_363
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_864
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_363: # %else1362
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_364
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_865
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_364: # %else1366
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_365
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_866
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_365: # %else1370
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_366
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_867
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_366: # %else1374
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_367
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_868
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_367: # %else1378
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_368
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_869
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_368: # %else1382
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_369
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_870
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_369: # %else1386
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_370
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_871
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_370: # %else1390
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_372
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_371: # %cond.load1393
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 350
+; CHECK-INDEXED-RV32-NEXT:    li a4, 349
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_372: # %else1394
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_374
+; CHECK-INDEXED-RV32-NEXT:  # %bb.373: # %cond.load1397
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 351
+; CHECK-INDEXED-RV32-NEXT:    li a4, 350
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_374: # %else1398
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_375
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_872
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_375: # %else1402
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_376
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_873
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_376: # %else1406
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_377
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_874
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_377: # %else1410
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_378
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_875
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_378: # %else1414
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_379
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_876
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_379: # %else1418
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_380
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_877
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_380: # %else1422
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_381
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_878
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_381: # %else1426
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_382
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_879
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_382: # %else1430
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_383
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_880
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_383: # %else1434
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_384
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_881
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_384: # %else1438
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_385
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_882
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_385: # %else1442
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_386
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_883
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_386: # %else1446
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_387
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_884
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_387: # %else1450
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_388
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_885
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_388: # %else1454
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_389
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_886
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_389: # %else1458
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_390
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_887
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_390: # %else1462
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_391
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_888
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_391: # %else1466
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_392
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_889
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_392: # %else1470
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_393
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_890
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_393: # %else1474
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_394
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_891
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_394: # %else1478
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_395
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_892
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_395: # %else1482
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_396
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_893
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_396: # %else1486
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_397
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_894
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_397: # %else1490
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_398
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_895
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_398: # %else1494
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_399
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_896
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_399: # %else1498
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_400
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_897
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_400: # %else1502
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_401
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_898
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_401: # %else1506
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_402
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_899
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_402: # %else1510
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_403
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_900
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_403: # %else1514
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_404
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_901
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_404: # %else1518
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_406
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_405: # %cond.load1521
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 382
+; CHECK-INDEXED-RV32-NEXT:    li a4, 381
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_406: # %else1522
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_408
+; CHECK-INDEXED-RV32-NEXT:  # %bb.407: # %cond.load1525
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 383
+; CHECK-INDEXED-RV32-NEXT:    li a4, 382
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_408: # %else1526
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_409
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_902
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_409: # %else1530
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_410
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_903
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_410: # %else1534
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_411
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_904
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_411: # %else1538
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_412
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_905
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_412: # %else1542
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_413
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_906
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_413: # %else1546
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_414
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_907
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_414: # %else1550
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_415
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_908
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_415: # %else1554
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_416
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_909
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_416: # %else1558
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_417
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_910
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_417: # %else1562
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_418
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_911
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_418: # %else1566
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_419
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_912
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_419: # %else1570
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_420
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_913
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_420: # %else1574
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_421
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_914
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_421: # %else1578
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_422
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_915
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_422: # %else1582
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_423
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_916
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_423: # %else1586
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_424
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_917
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_424: # %else1590
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_425
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_918
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_425: # %else1594
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_426
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_919
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_426: # %else1598
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_427
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_920
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_427: # %else1602
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_428
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_921
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_428: # %else1606
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_429
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_922
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_429: # %else1610
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_430
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_923
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_430: # %else1614
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_431
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_924
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_431: # %else1618
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_432
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_925
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_432: # %else1622
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_433
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_926
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_433: # %else1626
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_434
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_927
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_434: # %else1630
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_435
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_928
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_435: # %else1634
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_436
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_929
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_436: # %else1638
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_437
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_930
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_437: # %else1642
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_438
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_931
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_438: # %else1646
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_440
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_439: # %cond.load1649
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 414
+; CHECK-INDEXED-RV32-NEXT:    li a4, 413
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_440: # %else1650
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_442
+; CHECK-INDEXED-RV32-NEXT:  # %bb.441: # %cond.load1653
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 415
+; CHECK-INDEXED-RV32-NEXT:    li a4, 414
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_442: # %else1654
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_443
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_932
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_443: # %else1658
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_444
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_933
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_444: # %else1662
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_445
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_934
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_445: # %else1666
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_446
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_935
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_446: # %else1670
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_447
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_936
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_447: # %else1674
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_448
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_937
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_448: # %else1678
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_449
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_938
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_449: # %else1682
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_450
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_939
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_450: # %else1686
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_451
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_940
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_451: # %else1690
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_452
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_941
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_452: # %else1694
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_453
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_942
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_453: # %else1698
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_454
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_943
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_454: # %else1702
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_455
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_944
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_455: # %else1706
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_456
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_945
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_456: # %else1710
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_457
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_946
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_457: # %else1714
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_458
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_947
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_458: # %else1718
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_459
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_948
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_459: # %else1722
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_460
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_949
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_460: # %else1726
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_461
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_950
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_461: # %else1730
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_462
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_951
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_462: # %else1734
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_463
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_952
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_463: # %else1738
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_464
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_953
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_464: # %else1742
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_465
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_954
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_465: # %else1746
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_466
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_955
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_466: # %else1750
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_467
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_956
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_467: # %else1754
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_468
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_957
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_468: # %else1758
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_469
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_958
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_469: # %else1762
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_470
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_959
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_470: # %else1766
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_471
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_960
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_471: # %else1770
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_472
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_961
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_472: # %else1774
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_474
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_473: # %cond.load1777
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 446
+; CHECK-INDEXED-RV32-NEXT:    li a4, 445
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_474: # %else1778
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_476
+; CHECK-INDEXED-RV32-NEXT:  # %bb.475: # %cond.load1781
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 447
+; CHECK-INDEXED-RV32-NEXT:    li a4, 446
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_476: # %else1782
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_477
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_962
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_477: # %else1786
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_478
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_963
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_478: # %else1790
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_479
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_964
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_479: # %else1794
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_480
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_965
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_480: # %else1798
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_481
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_966
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_481: # %else1802
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_482
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_967
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_482: # %else1806
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_483
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_968
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_483: # %else1810
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_484
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_969
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_484: # %else1814
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_485
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_970
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_485: # %else1818
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_486
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_971
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_486: # %else1822
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_487
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_972
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_487: # %else1826
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_488
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_973
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_488: # %else1830
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_489
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_974
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_489: # %else1834
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_490
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_975
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_490: # %else1838
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_491
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_976
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_491: # %else1842
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_492
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_977
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_492: # %else1846
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_493
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_978
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_493: # %else1850
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_494
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_979
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_494: # %else1854
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_495
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_980
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_495: # %else1858
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_496
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_981
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_496: # %else1862
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_497
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_982
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_497: # %else1866
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_498
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_983
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_498: # %else1870
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_499
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_984
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_499: # %else1874
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_500
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_985
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_500: # %else1878
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_501
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_986
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_501: # %else1882
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_502
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_987
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_502: # %else1886
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_503
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_988
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_503: # %else1890
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_504
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_989
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_504: # %else1894
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_505
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_990
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_505: # %else1898
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_506
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_991
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_506: # %else1902
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_508
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_507: # %cond.load1905
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 478
+; CHECK-INDEXED-RV32-NEXT:    li a4, 477
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_508: # %else1906
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_510
+; CHECK-INDEXED-RV32-NEXT:  # %bb.509: # %cond.load1909
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV32-NEXT:    li a1, 479
+; CHECK-INDEXED-RV32-NEXT:    li a2, 478
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a2
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_510: # %else1910
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_511
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_992
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_511: # %else1914
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_512
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_993
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_512: # %else1918
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_513
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_994
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_513: # %else1922
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_514
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_995
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_514: # %else1926
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_515
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_996
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_515: # %else1930
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_516
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_997
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_516: # %else1934
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_517
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_998
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_517: # %else1938
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_518
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_999
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_518: # %else1942
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_519
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1000
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_519: # %else1946
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_520
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1001
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_520: # %else1950
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_521
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1002
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_521: # %else1954
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_522
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1003
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_522: # %else1958
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_523
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1004
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_523: # %else1962
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_524
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1005
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_524: # %else1966
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_525
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1006
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_525: # %else1970
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_526
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1007
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_526: # %else1974
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_527
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1008
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_527: # %else1978
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_528
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1009
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_528: # %else1982
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_529
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1010
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_529: # %else1986
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_530
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1011
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_530: # %else1990
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_531
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1012
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_531: # %else1994
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_532
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1013
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_532: # %else1998
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_533
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1014
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_533: # %else2002
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_534
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1015
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_534: # %else2006
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_535
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1016
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_535: # %else2010
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_536
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1017
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_536: # %else2014
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_537
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1018
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_537: # %else2018
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_538
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1019
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_538: # %else2022
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_539
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1020
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_539: # %else2026
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_540
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1021
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_540: # %else2030
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_541
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1022
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_541: # %else2034
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_542
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1023
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_542: # %else2038
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_543
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1024
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_543: # %else2042
+; CHECK-INDEXED-RV32-NEXT:    ret
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_544: # %cond.load
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v8, a1
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_545
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_2
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_545: # %cond.load1
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_546
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_3
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_546: # %cond.load5
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_547
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_4
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_547: # %cond.load9
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_548
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_5
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_548: # %cond.load13
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_549
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_6
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_549: # %cond.load17
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_550
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_7
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_550: # %cond.load21
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_551
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_8
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_551: # %cond.load25
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_552
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_9
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_552: # %cond.load29
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_553
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_10
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_553: # %cond.load33
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_554
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_11
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_554: # %cond.load37
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_555
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_12
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_555: # %cond.load41
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_556
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_13
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_556: # %cond.load45
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_557
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_14
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_557: # %cond.load49
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_558
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_15
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_558: # %cond.load53
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_559
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_16
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_559: # %cond.load57
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_560
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_17
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_560: # %cond.load61
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_561
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_18
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_561: # %cond.load65
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_562
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_19
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_562: # %cond.load69
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_563
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_20
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_563: # %cond.load73
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_564
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_21
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_564: # %cond.load77
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_565
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_22
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_565: # %cond.load81
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_566
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_23
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_566: # %cond.load85
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_567
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_24
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_567: # %cond.load89
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_568
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_25
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_568: # %cond.load93
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_569
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_26
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_569: # %cond.load97
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_570
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_27
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_570: # %cond.load101
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_571
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_28
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_571: # %cond.load105
+; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_1025
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_29
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1025: # %cond.load105
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_30
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_572: # %cond.load121
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_573
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_36
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_573: # %cond.load125
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 33
+; CHECK-INDEXED-RV32-NEXT:    li a4, 32
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_574
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_37
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_574: # %cond.load129
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 34
+; CHECK-INDEXED-RV32-NEXT:    li a4, 33
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_575
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_38
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_575: # %cond.load133
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 35
+; CHECK-INDEXED-RV32-NEXT:    li a4, 34
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_576
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_39
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_576: # %cond.load137
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 36
+; CHECK-INDEXED-RV32-NEXT:    li a4, 35
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_577
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_40
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_577: # %cond.load141
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 37
+; CHECK-INDEXED-RV32-NEXT:    li a4, 36
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_578
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_41
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_578: # %cond.load145
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 38
+; CHECK-INDEXED-RV32-NEXT:    li a4, 37
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_579
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_42
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_579: # %cond.load149
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 39
+; CHECK-INDEXED-RV32-NEXT:    li a4, 38
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_580
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_43
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_580: # %cond.load153
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 40
+; CHECK-INDEXED-RV32-NEXT:    li a4, 39
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_581
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_44
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_581: # %cond.load157
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 41
+; CHECK-INDEXED-RV32-NEXT:    li a4, 40
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_582
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_45
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_582: # %cond.load161
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 42
+; CHECK-INDEXED-RV32-NEXT:    li a4, 41
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_583
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_46
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_583: # %cond.load165
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 43
+; CHECK-INDEXED-RV32-NEXT:    li a4, 42
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_584
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_47
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_584: # %cond.load169
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 44
+; CHECK-INDEXED-RV32-NEXT:    li a4, 43
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_585
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_48
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_585: # %cond.load173
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 45
+; CHECK-INDEXED-RV32-NEXT:    li a4, 44
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_586
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_49
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_586: # %cond.load177
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 46
+; CHECK-INDEXED-RV32-NEXT:    li a4, 45
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_587
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_50
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_587: # %cond.load181
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 47
+; CHECK-INDEXED-RV32-NEXT:    li a4, 46
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_588
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_51
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_588: # %cond.load185
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 48
+; CHECK-INDEXED-RV32-NEXT:    li a4, 47
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_589
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_52
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_589: # %cond.load189
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 49
+; CHECK-INDEXED-RV32-NEXT:    li a4, 48
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_590
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_53
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_590: # %cond.load193
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 50
+; CHECK-INDEXED-RV32-NEXT:    li a4, 49
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_591
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_54
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_591: # %cond.load197
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 51
+; CHECK-INDEXED-RV32-NEXT:    li a4, 50
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_592
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_55
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_592: # %cond.load201
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 52
+; CHECK-INDEXED-RV32-NEXT:    li a4, 51
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_593
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_56
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_593: # %cond.load205
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 53
+; CHECK-INDEXED-RV32-NEXT:    li a4, 52
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_594
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_57
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_594: # %cond.load209
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 54
+; CHECK-INDEXED-RV32-NEXT:    li a4, 53
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_595
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_58
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_595: # %cond.load213
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 55
+; CHECK-INDEXED-RV32-NEXT:    li a4, 54
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_596
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_59
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_596: # %cond.load217
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 56
+; CHECK-INDEXED-RV32-NEXT:    li a4, 55
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_597
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_60
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_597: # %cond.load221
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 57
+; CHECK-INDEXED-RV32-NEXT:    li a4, 56
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_598
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_61
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_598: # %cond.load225
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 58
+; CHECK-INDEXED-RV32-NEXT:    li a4, 57
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_599
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_62
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_599: # %cond.load229
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 59
+; CHECK-INDEXED-RV32-NEXT:    li a4, 58
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_600
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_63
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_600: # %cond.load233
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 60
+; CHECK-INDEXED-RV32-NEXT:    li a4, 59
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_601
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_64
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_601: # %cond.load237
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 61
+; CHECK-INDEXED-RV32-NEXT:    li a4, 60
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1026
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_65
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1026: # %cond.load237
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_66
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_602: # %cond.load249
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 64
+; CHECK-INDEXED-RV32-NEXT:    li a4, 63
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_603
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_70
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_603: # %cond.load253
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 65
+; CHECK-INDEXED-RV32-NEXT:    li a4, 64
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_604
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_71
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_604: # %cond.load257
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 66
+; CHECK-INDEXED-RV32-NEXT:    li a4, 65
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_605
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_72
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_605: # %cond.load261
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 67
+; CHECK-INDEXED-RV32-NEXT:    li a4, 66
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_606
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_73
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_606: # %cond.load265
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 68
+; CHECK-INDEXED-RV32-NEXT:    li a4, 67
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_607
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_74
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_607: # %cond.load269
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 69
+; CHECK-INDEXED-RV32-NEXT:    li a4, 68
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_608
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_75
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_608: # %cond.load273
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 70
+; CHECK-INDEXED-RV32-NEXT:    li a4, 69
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_609
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_76
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_609: # %cond.load277
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 71
+; CHECK-INDEXED-RV32-NEXT:    li a4, 70
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_610
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_77
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_610: # %cond.load281
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 72
+; CHECK-INDEXED-RV32-NEXT:    li a4, 71
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_611
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_78
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_611: # %cond.load285
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 73
+; CHECK-INDEXED-RV32-NEXT:    li a4, 72
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_612
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_79
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_612: # %cond.load289
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 74
+; CHECK-INDEXED-RV32-NEXT:    li a4, 73
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_613
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_80
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_613: # %cond.load293
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 75
+; CHECK-INDEXED-RV32-NEXT:    li a4, 74
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_614
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_81
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_614: # %cond.load297
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 76
+; CHECK-INDEXED-RV32-NEXT:    li a4, 75
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_615
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_82
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_615: # %cond.load301
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 77
+; CHECK-INDEXED-RV32-NEXT:    li a4, 76
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_616
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_83
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_616: # %cond.load305
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 78
+; CHECK-INDEXED-RV32-NEXT:    li a4, 77
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_617
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_84
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_617: # %cond.load309
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 79
+; CHECK-INDEXED-RV32-NEXT:    li a4, 78
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_618
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_85
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_618: # %cond.load313
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 80
+; CHECK-INDEXED-RV32-NEXT:    li a4, 79
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_619
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_86
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_619: # %cond.load317
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 81
+; CHECK-INDEXED-RV32-NEXT:    li a4, 80
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_620
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_87
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_620: # %cond.load321
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 82
+; CHECK-INDEXED-RV32-NEXT:    li a4, 81
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_621
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_88
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_621: # %cond.load325
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 83
+; CHECK-INDEXED-RV32-NEXT:    li a4, 82
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_622
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_89
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_622: # %cond.load329
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 84
+; CHECK-INDEXED-RV32-NEXT:    li a4, 83
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_623
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_90
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_623: # %cond.load333
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 85
+; CHECK-INDEXED-RV32-NEXT:    li a4, 84
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_624
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_91
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_624: # %cond.load337
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 86
+; CHECK-INDEXED-RV32-NEXT:    li a4, 85
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_625
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_92
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_625: # %cond.load341
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 87
+; CHECK-INDEXED-RV32-NEXT:    li a4, 86
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_626
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_93
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_626: # %cond.load345
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 88
+; CHECK-INDEXED-RV32-NEXT:    li a4, 87
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_627
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_94
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_627: # %cond.load349
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 89
+; CHECK-INDEXED-RV32-NEXT:    li a4, 88
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_628
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_95
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_628: # %cond.load353
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 90
+; CHECK-INDEXED-RV32-NEXT:    li a4, 89
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_629
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_96
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_629: # %cond.load357
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 91
+; CHECK-INDEXED-RV32-NEXT:    li a4, 90
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_630
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_97
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_630: # %cond.load361
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 92
+; CHECK-INDEXED-RV32-NEXT:    li a4, 91
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_631
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_98
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_631: # %cond.load365
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 93
+; CHECK-INDEXED-RV32-NEXT:    li a4, 92
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1027
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_99
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1027: # %cond.load365
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_100
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_632: # %cond.load377
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 96
+; CHECK-INDEXED-RV32-NEXT:    li a4, 95
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_633
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_104
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_633: # %cond.load381
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 97
+; CHECK-INDEXED-RV32-NEXT:    li a4, 96
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_634
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_105
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_634: # %cond.load385
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 98
+; CHECK-INDEXED-RV32-NEXT:    li a4, 97
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_635
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_106
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_635: # %cond.load389
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 99
+; CHECK-INDEXED-RV32-NEXT:    li a4, 98
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_636
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_107
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_636: # %cond.load393
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 100
+; CHECK-INDEXED-RV32-NEXT:    li a4, 99
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_637
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_108
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_637: # %cond.load397
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 101
+; CHECK-INDEXED-RV32-NEXT:    li a4, 100
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_638
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_109
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_638: # %cond.load401
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 102
+; CHECK-INDEXED-RV32-NEXT:    li a4, 101
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_639
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_110
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_639: # %cond.load405
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 103
+; CHECK-INDEXED-RV32-NEXT:    li a4, 102
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_640
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_111
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_640: # %cond.load409
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 104
+; CHECK-INDEXED-RV32-NEXT:    li a4, 103
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_641
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_112
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_641: # %cond.load413
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 105
+; CHECK-INDEXED-RV32-NEXT:    li a4, 104
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_642
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_113
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_642: # %cond.load417
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 106
+; CHECK-INDEXED-RV32-NEXT:    li a4, 105
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_643
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_114
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_643: # %cond.load421
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 107
+; CHECK-INDEXED-RV32-NEXT:    li a4, 106
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_644
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_115
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_644: # %cond.load425
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 108
+; CHECK-INDEXED-RV32-NEXT:    li a4, 107
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_645
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_116
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_645: # %cond.load429
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 109
+; CHECK-INDEXED-RV32-NEXT:    li a4, 108
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_646
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_117
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_646: # %cond.load433
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 110
+; CHECK-INDEXED-RV32-NEXT:    li a4, 109
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_647
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_118
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_647: # %cond.load437
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 111
+; CHECK-INDEXED-RV32-NEXT:    li a4, 110
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_648
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_119
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_648: # %cond.load441
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 112
+; CHECK-INDEXED-RV32-NEXT:    li a4, 111
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_649
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_120
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_649: # %cond.load445
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 113
+; CHECK-INDEXED-RV32-NEXT:    li a4, 112
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_650
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_121
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_650: # %cond.load449
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 114
+; CHECK-INDEXED-RV32-NEXT:    li a4, 113
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_651
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_122
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_651: # %cond.load453
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 115
+; CHECK-INDEXED-RV32-NEXT:    li a4, 114
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_652
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_123
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_652: # %cond.load457
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 116
+; CHECK-INDEXED-RV32-NEXT:    li a4, 115
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_653
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_124
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_653: # %cond.load461
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 117
+; CHECK-INDEXED-RV32-NEXT:    li a4, 116
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_654
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_125
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_654: # %cond.load465
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 118
+; CHECK-INDEXED-RV32-NEXT:    li a4, 117
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_655
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_126
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_655: # %cond.load469
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 119
+; CHECK-INDEXED-RV32-NEXT:    li a4, 118
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_656
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_127
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_656: # %cond.load473
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 120
+; CHECK-INDEXED-RV32-NEXT:    li a4, 119
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_657
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_128
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_657: # %cond.load477
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 121
+; CHECK-INDEXED-RV32-NEXT:    li a4, 120
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_658
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_129
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_658: # %cond.load481
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 122
+; CHECK-INDEXED-RV32-NEXT:    li a4, 121
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_659
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_130
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_659: # %cond.load485
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 123
+; CHECK-INDEXED-RV32-NEXT:    li a4, 122
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_660
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_131
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_660: # %cond.load489
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 124
+; CHECK-INDEXED-RV32-NEXT:    li a4, 123
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_661
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_132
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_661: # %cond.load493
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 125
+; CHECK-INDEXED-RV32-NEXT:    li a4, 124
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1028
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_133
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1028: # %cond.load493
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_134
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_662: # %cond.load505
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 128
+; CHECK-INDEXED-RV32-NEXT:    li a4, 127
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_663
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_138
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_663: # %cond.load509
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 129
+; CHECK-INDEXED-RV32-NEXT:    li a4, 128
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_664
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_139
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_664: # %cond.load513
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 130
+; CHECK-INDEXED-RV32-NEXT:    li a4, 129
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_665
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_140
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_665: # %cond.load517
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 131
+; CHECK-INDEXED-RV32-NEXT:    li a4, 130
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_666
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_141
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_666: # %cond.load521
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 132
+; CHECK-INDEXED-RV32-NEXT:    li a4, 131
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_667
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_142
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_667: # %cond.load525
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 133
+; CHECK-INDEXED-RV32-NEXT:    li a4, 132
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_668
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_143
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_668: # %cond.load529
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 134
+; CHECK-INDEXED-RV32-NEXT:    li a4, 133
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_669
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_144
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_669: # %cond.load533
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 135
+; CHECK-INDEXED-RV32-NEXT:    li a4, 134
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_670
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_145
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_670: # %cond.load537
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 136
+; CHECK-INDEXED-RV32-NEXT:    li a4, 135
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_671
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_146
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_671: # %cond.load541
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 137
+; CHECK-INDEXED-RV32-NEXT:    li a4, 136
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_672
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_147
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_672: # %cond.load545
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 138
+; CHECK-INDEXED-RV32-NEXT:    li a4, 137
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_673
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_148
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_673: # %cond.load549
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 139
+; CHECK-INDEXED-RV32-NEXT:    li a4, 138
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_674
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_149
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_674: # %cond.load553
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 140
+; CHECK-INDEXED-RV32-NEXT:    li a4, 139
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_675
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_150
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_675: # %cond.load557
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 141
+; CHECK-INDEXED-RV32-NEXT:    li a4, 140
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_676
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_151
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_676: # %cond.load561
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 142
+; CHECK-INDEXED-RV32-NEXT:    li a4, 141
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_677
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_152
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_677: # %cond.load565
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 143
+; CHECK-INDEXED-RV32-NEXT:    li a4, 142
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_678
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_153
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_678: # %cond.load569
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 144
+; CHECK-INDEXED-RV32-NEXT:    li a4, 143
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_679
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_154
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_679: # %cond.load573
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 145
+; CHECK-INDEXED-RV32-NEXT:    li a4, 144
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_680
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_155
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_680: # %cond.load577
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 146
+; CHECK-INDEXED-RV32-NEXT:    li a4, 145
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_681
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_156
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_681: # %cond.load581
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 147
+; CHECK-INDEXED-RV32-NEXT:    li a4, 146
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_682
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_157
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_682: # %cond.load585
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 148
+; CHECK-INDEXED-RV32-NEXT:    li a4, 147
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_683
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_158
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_683: # %cond.load589
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 149
+; CHECK-INDEXED-RV32-NEXT:    li a4, 148
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_684
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_159
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_684: # %cond.load593
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 150
+; CHECK-INDEXED-RV32-NEXT:    li a4, 149
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_685
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_160
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_685: # %cond.load597
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 151
+; CHECK-INDEXED-RV32-NEXT:    li a4, 150
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_686
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_161
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_686: # %cond.load601
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 152
+; CHECK-INDEXED-RV32-NEXT:    li a4, 151
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_687
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_162
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_687: # %cond.load605
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 153
+; CHECK-INDEXED-RV32-NEXT:    li a4, 152
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_688
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_163
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_688: # %cond.load609
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 154
+; CHECK-INDEXED-RV32-NEXT:    li a4, 153
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_689
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_164
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_689: # %cond.load613
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 155
+; CHECK-INDEXED-RV32-NEXT:    li a4, 154
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_690
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_165
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_690: # %cond.load617
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 156
+; CHECK-INDEXED-RV32-NEXT:    li a4, 155
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_691
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_166
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_691: # %cond.load621
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 157
+; CHECK-INDEXED-RV32-NEXT:    li a4, 156
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1029
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_167
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1029: # %cond.load621
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_168
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_692: # %cond.load633
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 160
+; CHECK-INDEXED-RV32-NEXT:    li a4, 159
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_693
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_172
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_693: # %cond.load637
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 161
+; CHECK-INDEXED-RV32-NEXT:    li a4, 160
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_694
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_173
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_694: # %cond.load641
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 162
+; CHECK-INDEXED-RV32-NEXT:    li a4, 161
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_695
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_174
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_695: # %cond.load645
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 163
+; CHECK-INDEXED-RV32-NEXT:    li a4, 162
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_696
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_175
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_696: # %cond.load649
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 164
+; CHECK-INDEXED-RV32-NEXT:    li a4, 163
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_697
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_176
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_697: # %cond.load653
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 165
+; CHECK-INDEXED-RV32-NEXT:    li a4, 164
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_698
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_177
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_698: # %cond.load657
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 166
+; CHECK-INDEXED-RV32-NEXT:    li a4, 165
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_699
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_178
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_699: # %cond.load661
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 167
+; CHECK-INDEXED-RV32-NEXT:    li a4, 166
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_700
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_179
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_700: # %cond.load665
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 168
+; CHECK-INDEXED-RV32-NEXT:    li a4, 167
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_701
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_180
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_701: # %cond.load669
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 169
+; CHECK-INDEXED-RV32-NEXT:    li a4, 168
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_702
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_181
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_702: # %cond.load673
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 170
+; CHECK-INDEXED-RV32-NEXT:    li a4, 169
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_703
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_182
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_703: # %cond.load677
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 171
+; CHECK-INDEXED-RV32-NEXT:    li a4, 170
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_704
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_183
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_704: # %cond.load681
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 172
+; CHECK-INDEXED-RV32-NEXT:    li a4, 171
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_705
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_184
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_705: # %cond.load685
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 173
+; CHECK-INDEXED-RV32-NEXT:    li a4, 172
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_706
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_185
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_706: # %cond.load689
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 174
+; CHECK-INDEXED-RV32-NEXT:    li a4, 173
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_707
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_186
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_707: # %cond.load693
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 175
+; CHECK-INDEXED-RV32-NEXT:    li a4, 174
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_708
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_187
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_708: # %cond.load697
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 176
+; CHECK-INDEXED-RV32-NEXT:    li a4, 175
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_709
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_188
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_709: # %cond.load701
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 177
+; CHECK-INDEXED-RV32-NEXT:    li a4, 176
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_710
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_189
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_710: # %cond.load705
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 178
+; CHECK-INDEXED-RV32-NEXT:    li a4, 177
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_711
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_190
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_711: # %cond.load709
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 179
+; CHECK-INDEXED-RV32-NEXT:    li a4, 178
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_712
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_191
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_712: # %cond.load713
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 180
+; CHECK-INDEXED-RV32-NEXT:    li a4, 179
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_713
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_192
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_713: # %cond.load717
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 181
+; CHECK-INDEXED-RV32-NEXT:    li a4, 180
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_714
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_193
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_714: # %cond.load721
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 182
+; CHECK-INDEXED-RV32-NEXT:    li a4, 181
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_715
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_194
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_715: # %cond.load725
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 183
+; CHECK-INDEXED-RV32-NEXT:    li a4, 182
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_716
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_195
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_716: # %cond.load729
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 184
+; CHECK-INDEXED-RV32-NEXT:    li a4, 183
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_717
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_196
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_717: # %cond.load733
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 185
+; CHECK-INDEXED-RV32-NEXT:    li a4, 184
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_718
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_197
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_718: # %cond.load737
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 186
+; CHECK-INDEXED-RV32-NEXT:    li a4, 185
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_719
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_198
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_719: # %cond.load741
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 187
+; CHECK-INDEXED-RV32-NEXT:    li a4, 186
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_720
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_199
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_720: # %cond.load745
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 188
+; CHECK-INDEXED-RV32-NEXT:    li a4, 187
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_721
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_200
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_721: # %cond.load749
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 189
+; CHECK-INDEXED-RV32-NEXT:    li a4, 188
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1030
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_201
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1030: # %cond.load749
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_202
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_722: # %cond.load761
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 192
+; CHECK-INDEXED-RV32-NEXT:    li a4, 191
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_723
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_206
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_723: # %cond.load765
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 193
+; CHECK-INDEXED-RV32-NEXT:    li a4, 192
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_724
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_207
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_724: # %cond.load769
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 194
+; CHECK-INDEXED-RV32-NEXT:    li a4, 193
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_725
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_208
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_725: # %cond.load773
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 195
+; CHECK-INDEXED-RV32-NEXT:    li a4, 194
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_726
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_209
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_726: # %cond.load777
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 196
+; CHECK-INDEXED-RV32-NEXT:    li a4, 195
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_727
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_210
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_727: # %cond.load781
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 197
+; CHECK-INDEXED-RV32-NEXT:    li a4, 196
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_728
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_211
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_728: # %cond.load785
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 198
+; CHECK-INDEXED-RV32-NEXT:    li a4, 197
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_729
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_212
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_729: # %cond.load789
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 199
+; CHECK-INDEXED-RV32-NEXT:    li a4, 198
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_730
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_213
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_730: # %cond.load793
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 200
+; CHECK-INDEXED-RV32-NEXT:    li a4, 199
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_731
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_214
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_731: # %cond.load797
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 201
+; CHECK-INDEXED-RV32-NEXT:    li a4, 200
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_732
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_215
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_732: # %cond.load801
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 202
+; CHECK-INDEXED-RV32-NEXT:    li a4, 201
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_733
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_216
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_733: # %cond.load805
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 203
+; CHECK-INDEXED-RV32-NEXT:    li a4, 202
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_734
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_217
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_734: # %cond.load809
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 204
+; CHECK-INDEXED-RV32-NEXT:    li a4, 203
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_735
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_218
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_735: # %cond.load813
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 205
+; CHECK-INDEXED-RV32-NEXT:    li a4, 204
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_736
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_219
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_736: # %cond.load817
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 206
+; CHECK-INDEXED-RV32-NEXT:    li a4, 205
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_737
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_220
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_737: # %cond.load821
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 207
+; CHECK-INDEXED-RV32-NEXT:    li a4, 206
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_738
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_221
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_738: # %cond.load825
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 208
+; CHECK-INDEXED-RV32-NEXT:    li a4, 207
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_739
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_222
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_739: # %cond.load829
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 209
+; CHECK-INDEXED-RV32-NEXT:    li a4, 208
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_740
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_223
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_740: # %cond.load833
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 210
+; CHECK-INDEXED-RV32-NEXT:    li a4, 209
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_741
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_224
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_741: # %cond.load837
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 211
+; CHECK-INDEXED-RV32-NEXT:    li a4, 210
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_742
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_225
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_742: # %cond.load841
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 212
+; CHECK-INDEXED-RV32-NEXT:    li a4, 211
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_743
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_226
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_743: # %cond.load845
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 213
+; CHECK-INDEXED-RV32-NEXT:    li a4, 212
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_744
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_227
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_744: # %cond.load849
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 214
+; CHECK-INDEXED-RV32-NEXT:    li a4, 213
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_745
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_228
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_745: # %cond.load853
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 215
+; CHECK-INDEXED-RV32-NEXT:    li a4, 214
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_746
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_229
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_746: # %cond.load857
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 216
+; CHECK-INDEXED-RV32-NEXT:    li a4, 215
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_747
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_230
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_747: # %cond.load861
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 217
+; CHECK-INDEXED-RV32-NEXT:    li a4, 216
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_748
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_231
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_748: # %cond.load865
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 218
+; CHECK-INDEXED-RV32-NEXT:    li a4, 217
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_749
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_232
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_749: # %cond.load869
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 219
+; CHECK-INDEXED-RV32-NEXT:    li a4, 218
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_750
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_233
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_750: # %cond.load873
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 220
+; CHECK-INDEXED-RV32-NEXT:    li a4, 219
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_751
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_234
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_751: # %cond.load877
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 221
+; CHECK-INDEXED-RV32-NEXT:    li a4, 220
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1031
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_235
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1031: # %cond.load877
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_236
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_752: # %cond.load889
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 224
+; CHECK-INDEXED-RV32-NEXT:    li a4, 223
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_753
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_240
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_753: # %cond.load893
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 225
+; CHECK-INDEXED-RV32-NEXT:    li a4, 224
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_754
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_241
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_754: # %cond.load897
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 226
+; CHECK-INDEXED-RV32-NEXT:    li a4, 225
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_755
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_242
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_755: # %cond.load901
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 227
+; CHECK-INDEXED-RV32-NEXT:    li a4, 226
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_756
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_243
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_756: # %cond.load905
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 228
+; CHECK-INDEXED-RV32-NEXT:    li a4, 227
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_757
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_244
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_757: # %cond.load909
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 229
+; CHECK-INDEXED-RV32-NEXT:    li a4, 228
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_758
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_245
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_758: # %cond.load913
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 230
+; CHECK-INDEXED-RV32-NEXT:    li a4, 229
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_759
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_246
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_759: # %cond.load917
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 231
+; CHECK-INDEXED-RV32-NEXT:    li a4, 230
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_760
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_247
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_760: # %cond.load921
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 232
+; CHECK-INDEXED-RV32-NEXT:    li a4, 231
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_761
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_248
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_761: # %cond.load925
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 233
+; CHECK-INDEXED-RV32-NEXT:    li a4, 232
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_762
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_249
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_762: # %cond.load929
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 234
+; CHECK-INDEXED-RV32-NEXT:    li a4, 233
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_763
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_250
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_763: # %cond.load933
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 235
+; CHECK-INDEXED-RV32-NEXT:    li a4, 234
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_764
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_251
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_764: # %cond.load937
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 236
+; CHECK-INDEXED-RV32-NEXT:    li a4, 235
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_765
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_252
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_765: # %cond.load941
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 237
+; CHECK-INDEXED-RV32-NEXT:    li a4, 236
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_766
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_253
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_766: # %cond.load945
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 238
+; CHECK-INDEXED-RV32-NEXT:    li a4, 237
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_767
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_254
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_767: # %cond.load949
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 239
+; CHECK-INDEXED-RV32-NEXT:    li a4, 238
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_768
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_255
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_768: # %cond.load953
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 240
+; CHECK-INDEXED-RV32-NEXT:    li a4, 239
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_769
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_256
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_769: # %cond.load957
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 241
+; CHECK-INDEXED-RV32-NEXT:    li a4, 240
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_770
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_257
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_770: # %cond.load961
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 242
+; CHECK-INDEXED-RV32-NEXT:    li a4, 241
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_771
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_258
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_771: # %cond.load965
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 243
+; CHECK-INDEXED-RV32-NEXT:    li a4, 242
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_772
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_259
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_772: # %cond.load969
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 244
+; CHECK-INDEXED-RV32-NEXT:    li a4, 243
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_773
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_260
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_773: # %cond.load973
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 245
+; CHECK-INDEXED-RV32-NEXT:    li a4, 244
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_774
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_261
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_774: # %cond.load977
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 246
+; CHECK-INDEXED-RV32-NEXT:    li a4, 245
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_775
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_262
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_775: # %cond.load981
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 247
+; CHECK-INDEXED-RV32-NEXT:    li a4, 246
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_776
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_263
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_776: # %cond.load985
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 248
+; CHECK-INDEXED-RV32-NEXT:    li a4, 247
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_777
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_264
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_777: # %cond.load989
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 249
+; CHECK-INDEXED-RV32-NEXT:    li a4, 248
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_778
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_265
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_778: # %cond.load993
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 250
+; CHECK-INDEXED-RV32-NEXT:    li a4, 249
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_779
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_266
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_779: # %cond.load997
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 251
+; CHECK-INDEXED-RV32-NEXT:    li a4, 250
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_780
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_267
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_780: # %cond.load1001
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 252
+; CHECK-INDEXED-RV32-NEXT:    li a4, 251
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_781
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_268
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_781: # %cond.load1005
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a3, 253
+; CHECK-INDEXED-RV32-NEXT:    li a4, 252
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1032
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_269
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1032: # %cond.load1005
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_270
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_782: # %cond.load1017
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    li a2, 256
+; CHECK-INDEXED-RV32-NEXT:    li a4, 255
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_783
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_274
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_783: # %cond.load1021
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 257
+; CHECK-INDEXED-RV32-NEXT:    li a4, 256
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_784
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_275
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_784: # %cond.load1025
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 258
+; CHECK-INDEXED-RV32-NEXT:    li a4, 257
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_785
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_276
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_785: # %cond.load1029
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 259
+; CHECK-INDEXED-RV32-NEXT:    li a4, 258
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_786
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_277
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_786: # %cond.load1033
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 260
+; CHECK-INDEXED-RV32-NEXT:    li a4, 259
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_787
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_278
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_787: # %cond.load1037
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 261
+; CHECK-INDEXED-RV32-NEXT:    li a4, 260
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_788
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_279
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_788: # %cond.load1041
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 262
+; CHECK-INDEXED-RV32-NEXT:    li a4, 261
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_789
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_280
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_789: # %cond.load1045
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 263
+; CHECK-INDEXED-RV32-NEXT:    li a4, 262
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_790
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_281
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_790: # %cond.load1049
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 264
+; CHECK-INDEXED-RV32-NEXT:    li a4, 263
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_791
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_282
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_791: # %cond.load1053
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 265
+; CHECK-INDEXED-RV32-NEXT:    li a4, 264
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_792
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_283
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_792: # %cond.load1057
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 266
+; CHECK-INDEXED-RV32-NEXT:    li a4, 265
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_793
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_284
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_793: # %cond.load1061
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 267
+; CHECK-INDEXED-RV32-NEXT:    li a4, 266
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_794
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_285
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_794: # %cond.load1065
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 268
+; CHECK-INDEXED-RV32-NEXT:    li a4, 267
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_795
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_286
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_795: # %cond.load1069
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 269
+; CHECK-INDEXED-RV32-NEXT:    li a4, 268
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_796
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_287
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_796: # %cond.load1073
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 270
+; CHECK-INDEXED-RV32-NEXT:    li a4, 269
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_797
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_288
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_797: # %cond.load1077
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 271
+; CHECK-INDEXED-RV32-NEXT:    li a4, 270
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_798
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_289
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_798: # %cond.load1081
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 272
+; CHECK-INDEXED-RV32-NEXT:    li a4, 271
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_799
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_290
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_799: # %cond.load1085
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 273
+; CHECK-INDEXED-RV32-NEXT:    li a4, 272
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_800
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_291
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_800: # %cond.load1089
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 274
+; CHECK-INDEXED-RV32-NEXT:    li a4, 273
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_801
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_292
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_801: # %cond.load1093
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 275
+; CHECK-INDEXED-RV32-NEXT:    li a4, 274
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_802
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_293
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_802: # %cond.load1097
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 276
+; CHECK-INDEXED-RV32-NEXT:    li a4, 275
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_803
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_294
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_803: # %cond.load1101
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 277
+; CHECK-INDEXED-RV32-NEXT:    li a4, 276
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_804
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_295
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_804: # %cond.load1105
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 278
+; CHECK-INDEXED-RV32-NEXT:    li a4, 277
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_805
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_296
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_805: # %cond.load1109
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 279
+; CHECK-INDEXED-RV32-NEXT:    li a4, 278
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_806
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_297
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_806: # %cond.load1113
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 280
+; CHECK-INDEXED-RV32-NEXT:    li a4, 279
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_807
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_298
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_807: # %cond.load1117
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 281
+; CHECK-INDEXED-RV32-NEXT:    li a4, 280
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_808
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_299
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_808: # %cond.load1121
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 282
+; CHECK-INDEXED-RV32-NEXT:    li a4, 281
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_809
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_300
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_809: # %cond.load1125
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 283
+; CHECK-INDEXED-RV32-NEXT:    li a4, 282
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_810
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_301
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_810: # %cond.load1129
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 284
+; CHECK-INDEXED-RV32-NEXT:    li a4, 283
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_811
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_302
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_811: # %cond.load1133
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 285
+; CHECK-INDEXED-RV32-NEXT:    li a4, 284
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1033
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_303
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1033: # %cond.load1133
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_304
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_812: # %cond.load1145
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 288
+; CHECK-INDEXED-RV32-NEXT:    li a4, 287
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_813
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_308
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_813: # %cond.load1149
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 289
+; CHECK-INDEXED-RV32-NEXT:    li a4, 288
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_814
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_309
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_814: # %cond.load1153
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 290
+; CHECK-INDEXED-RV32-NEXT:    li a4, 289
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_815
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_310
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_815: # %cond.load1157
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 291
+; CHECK-INDEXED-RV32-NEXT:    li a4, 290
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_816
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_311
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_816: # %cond.load1161
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 292
+; CHECK-INDEXED-RV32-NEXT:    li a4, 291
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_817
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_312
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_817: # %cond.load1165
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 293
+; CHECK-INDEXED-RV32-NEXT:    li a4, 292
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_818
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_313
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_818: # %cond.load1169
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 294
+; CHECK-INDEXED-RV32-NEXT:    li a4, 293
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_819
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_314
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_819: # %cond.load1173
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 295
+; CHECK-INDEXED-RV32-NEXT:    li a4, 294
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_820
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_315
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_820: # %cond.load1177
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 296
+; CHECK-INDEXED-RV32-NEXT:    li a4, 295
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_821
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_316
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_821: # %cond.load1181
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 297
+; CHECK-INDEXED-RV32-NEXT:    li a4, 296
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_822
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_317
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_822: # %cond.load1185
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 298
+; CHECK-INDEXED-RV32-NEXT:    li a4, 297
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_823
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_318
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_823: # %cond.load1189
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 299
+; CHECK-INDEXED-RV32-NEXT:    li a4, 298
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_824
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_319
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_824: # %cond.load1193
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 300
+; CHECK-INDEXED-RV32-NEXT:    li a4, 299
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_825
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_320
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_825: # %cond.load1197
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 301
+; CHECK-INDEXED-RV32-NEXT:    li a4, 300
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_826
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_321
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_826: # %cond.load1201
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 302
+; CHECK-INDEXED-RV32-NEXT:    li a4, 301
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_827
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_322
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_827: # %cond.load1205
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 303
+; CHECK-INDEXED-RV32-NEXT:    li a4, 302
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_828
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_323
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_828: # %cond.load1209
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 304
+; CHECK-INDEXED-RV32-NEXT:    li a4, 303
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_829
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_324
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_829: # %cond.load1213
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 305
+; CHECK-INDEXED-RV32-NEXT:    li a4, 304
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_830
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_325
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_830: # %cond.load1217
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 306
+; CHECK-INDEXED-RV32-NEXT:    li a4, 305
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_831
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_326
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_831: # %cond.load1221
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 307
+; CHECK-INDEXED-RV32-NEXT:    li a4, 306
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_832
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_327
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_832: # %cond.load1225
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 308
+; CHECK-INDEXED-RV32-NEXT:    li a4, 307
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_833
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_328
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_833: # %cond.load1229
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 309
+; CHECK-INDEXED-RV32-NEXT:    li a4, 308
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_834
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_329
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_834: # %cond.load1233
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 310
+; CHECK-INDEXED-RV32-NEXT:    li a4, 309
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_835
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_330
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_835: # %cond.load1237
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 311
+; CHECK-INDEXED-RV32-NEXT:    li a4, 310
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_836
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_331
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_836: # %cond.load1241
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 312
+; CHECK-INDEXED-RV32-NEXT:    li a4, 311
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_837
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_332
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_837: # %cond.load1245
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 313
+; CHECK-INDEXED-RV32-NEXT:    li a4, 312
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_838
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_333
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_838: # %cond.load1249
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 314
+; CHECK-INDEXED-RV32-NEXT:    li a4, 313
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_839
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_334
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_839: # %cond.load1253
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 315
+; CHECK-INDEXED-RV32-NEXT:    li a4, 314
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_840
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_335
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_840: # %cond.load1257
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 316
+; CHECK-INDEXED-RV32-NEXT:    li a4, 315
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_841
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_336
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_841: # %cond.load1261
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 317
+; CHECK-INDEXED-RV32-NEXT:    li a4, 316
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1034
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_337
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1034: # %cond.load1261
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_338
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_842: # %cond.load1273
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 320
+; CHECK-INDEXED-RV32-NEXT:    li a4, 319
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_843
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_342
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_843: # %cond.load1277
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 321
+; CHECK-INDEXED-RV32-NEXT:    li a4, 320
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_844
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_343
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_844: # %cond.load1281
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 322
+; CHECK-INDEXED-RV32-NEXT:    li a4, 321
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_845
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_344
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_845: # %cond.load1285
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 323
+; CHECK-INDEXED-RV32-NEXT:    li a4, 322
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_846
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_345
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_846: # %cond.load1289
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 324
+; CHECK-INDEXED-RV32-NEXT:    li a4, 323
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_847
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_346
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_847: # %cond.load1293
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 325
+; CHECK-INDEXED-RV32-NEXT:    li a4, 324
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_848
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_347
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_848: # %cond.load1297
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 326
+; CHECK-INDEXED-RV32-NEXT:    li a4, 325
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_849
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_348
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_849: # %cond.load1301
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 327
+; CHECK-INDEXED-RV32-NEXT:    li a4, 326
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_850
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_349
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_850: # %cond.load1305
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 328
+; CHECK-INDEXED-RV32-NEXT:    li a4, 327
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_851
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_350
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_851: # %cond.load1309
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 329
+; CHECK-INDEXED-RV32-NEXT:    li a4, 328
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_852
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_351
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_852: # %cond.load1313
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 330
+; CHECK-INDEXED-RV32-NEXT:    li a4, 329
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_853
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_352
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_853: # %cond.load1317
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 331
+; CHECK-INDEXED-RV32-NEXT:    li a4, 330
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_854
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_353
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_854: # %cond.load1321
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 332
+; CHECK-INDEXED-RV32-NEXT:    li a4, 331
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_855
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_354
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_855: # %cond.load1325
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 333
+; CHECK-INDEXED-RV32-NEXT:    li a4, 332
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_856
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_355
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_856: # %cond.load1329
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 334
+; CHECK-INDEXED-RV32-NEXT:    li a4, 333
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_857
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_356
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_857: # %cond.load1333
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 335
+; CHECK-INDEXED-RV32-NEXT:    li a4, 334
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_858
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_357
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_858: # %cond.load1337
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 336
+; CHECK-INDEXED-RV32-NEXT:    li a4, 335
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_859
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_358
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_859: # %cond.load1341
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 337
+; CHECK-INDEXED-RV32-NEXT:    li a4, 336
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_860
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_359
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_860: # %cond.load1345
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 338
+; CHECK-INDEXED-RV32-NEXT:    li a4, 337
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_861
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_360
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_861: # %cond.load1349
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 339
+; CHECK-INDEXED-RV32-NEXT:    li a4, 338
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_862
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_361
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_862: # %cond.load1353
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 340
+; CHECK-INDEXED-RV32-NEXT:    li a4, 339
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_863
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_362
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_863: # %cond.load1357
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 341
+; CHECK-INDEXED-RV32-NEXT:    li a4, 340
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_864
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_363
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_864: # %cond.load1361
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 342
+; CHECK-INDEXED-RV32-NEXT:    li a4, 341
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_865
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_364
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_865: # %cond.load1365
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 343
+; CHECK-INDEXED-RV32-NEXT:    li a4, 342
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_866
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_365
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_866: # %cond.load1369
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 344
+; CHECK-INDEXED-RV32-NEXT:    li a4, 343
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_867
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_366
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_867: # %cond.load1373
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 345
+; CHECK-INDEXED-RV32-NEXT:    li a4, 344
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_868
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_367
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_868: # %cond.load1377
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 346
+; CHECK-INDEXED-RV32-NEXT:    li a4, 345
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_869
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_368
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_869: # %cond.load1381
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 347
+; CHECK-INDEXED-RV32-NEXT:    li a4, 346
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_870
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_369
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_870: # %cond.load1385
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 348
+; CHECK-INDEXED-RV32-NEXT:    li a4, 347
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_871
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_370
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_871: # %cond.load1389
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 349
+; CHECK-INDEXED-RV32-NEXT:    li a4, 348
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1035
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_371
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1035: # %cond.load1389
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_372
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_872: # %cond.load1401
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 352
+; CHECK-INDEXED-RV32-NEXT:    li a4, 351
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_873
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_376
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_873: # %cond.load1405
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 353
+; CHECK-INDEXED-RV32-NEXT:    li a4, 352
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_874
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_377
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_874: # %cond.load1409
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 354
+; CHECK-INDEXED-RV32-NEXT:    li a4, 353
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_875
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_378
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_875: # %cond.load1413
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 355
+; CHECK-INDEXED-RV32-NEXT:    li a4, 354
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_876
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_379
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_876: # %cond.load1417
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 356
+; CHECK-INDEXED-RV32-NEXT:    li a4, 355
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_877
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_380
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_877: # %cond.load1421
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 357
+; CHECK-INDEXED-RV32-NEXT:    li a4, 356
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_878
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_381
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_878: # %cond.load1425
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 358
+; CHECK-INDEXED-RV32-NEXT:    li a4, 357
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_879
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_382
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_879: # %cond.load1429
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 359
+; CHECK-INDEXED-RV32-NEXT:    li a4, 358
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_880
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_383
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_880: # %cond.load1433
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 360
+; CHECK-INDEXED-RV32-NEXT:    li a4, 359
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_881
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_384
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_881: # %cond.load1437
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 361
+; CHECK-INDEXED-RV32-NEXT:    li a4, 360
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_882
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_385
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_882: # %cond.load1441
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 362
+; CHECK-INDEXED-RV32-NEXT:    li a4, 361
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_883
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_386
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_883: # %cond.load1445
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 363
+; CHECK-INDEXED-RV32-NEXT:    li a4, 362
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_884
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_387
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_884: # %cond.load1449
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 364
+; CHECK-INDEXED-RV32-NEXT:    li a4, 363
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_885
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_388
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_885: # %cond.load1453
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 365
+; CHECK-INDEXED-RV32-NEXT:    li a4, 364
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_886
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_389
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_886: # %cond.load1457
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 366
+; CHECK-INDEXED-RV32-NEXT:    li a4, 365
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_887
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_390
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_887: # %cond.load1461
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 367
+; CHECK-INDEXED-RV32-NEXT:    li a4, 366
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_888
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_391
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_888: # %cond.load1465
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 368
+; CHECK-INDEXED-RV32-NEXT:    li a4, 367
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_889
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_392
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_889: # %cond.load1469
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 369
+; CHECK-INDEXED-RV32-NEXT:    li a4, 368
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_890
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_393
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_890: # %cond.load1473
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 370
+; CHECK-INDEXED-RV32-NEXT:    li a4, 369
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_891
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_394
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_891: # %cond.load1477
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 371
+; CHECK-INDEXED-RV32-NEXT:    li a4, 370
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_892
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_395
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_892: # %cond.load1481
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 372
+; CHECK-INDEXED-RV32-NEXT:    li a4, 371
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_893
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_396
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_893: # %cond.load1485
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 373
+; CHECK-INDEXED-RV32-NEXT:    li a4, 372
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_894
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_397
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_894: # %cond.load1489
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 374
+; CHECK-INDEXED-RV32-NEXT:    li a4, 373
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_895
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_398
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_895: # %cond.load1493
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 375
+; CHECK-INDEXED-RV32-NEXT:    li a4, 374
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_896
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_399
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_896: # %cond.load1497
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 376
+; CHECK-INDEXED-RV32-NEXT:    li a4, 375
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_897
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_400
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_897: # %cond.load1501
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 377
+; CHECK-INDEXED-RV32-NEXT:    li a4, 376
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_898
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_401
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_898: # %cond.load1505
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 378
+; CHECK-INDEXED-RV32-NEXT:    li a4, 377
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_899
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_402
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_899: # %cond.load1509
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 379
+; CHECK-INDEXED-RV32-NEXT:    li a4, 378
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_900
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_403
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_900: # %cond.load1513
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 380
+; CHECK-INDEXED-RV32-NEXT:    li a4, 379
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_901
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_404
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_901: # %cond.load1517
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 381
+; CHECK-INDEXED-RV32-NEXT:    li a4, 380
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1036
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_405
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1036: # %cond.load1517
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_406
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_902: # %cond.load1529
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 384
+; CHECK-INDEXED-RV32-NEXT:    li a4, 383
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_903
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_410
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_903: # %cond.load1533
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 385
+; CHECK-INDEXED-RV32-NEXT:    li a4, 384
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_904
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_411
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_904: # %cond.load1537
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 386
+; CHECK-INDEXED-RV32-NEXT:    li a4, 385
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_905
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_412
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_905: # %cond.load1541
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 387
+; CHECK-INDEXED-RV32-NEXT:    li a4, 386
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_906
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_413
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_906: # %cond.load1545
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 388
+; CHECK-INDEXED-RV32-NEXT:    li a4, 387
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_907
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_414
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_907: # %cond.load1549
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 389
+; CHECK-INDEXED-RV32-NEXT:    li a4, 388
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_908
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_415
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_908: # %cond.load1553
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 390
+; CHECK-INDEXED-RV32-NEXT:    li a4, 389
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_909
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_416
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_909: # %cond.load1557
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 391
+; CHECK-INDEXED-RV32-NEXT:    li a4, 390
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_910
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_417
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_910: # %cond.load1561
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 392
+; CHECK-INDEXED-RV32-NEXT:    li a4, 391
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_911
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_418
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_911: # %cond.load1565
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 393
+; CHECK-INDEXED-RV32-NEXT:    li a4, 392
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_912
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_419
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_912: # %cond.load1569
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 394
+; CHECK-INDEXED-RV32-NEXT:    li a4, 393
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_913
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_420
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_913: # %cond.load1573
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 395
+; CHECK-INDEXED-RV32-NEXT:    li a4, 394
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_914
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_421
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_914: # %cond.load1577
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 396
+; CHECK-INDEXED-RV32-NEXT:    li a4, 395
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_915
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_422
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_915: # %cond.load1581
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 397
+; CHECK-INDEXED-RV32-NEXT:    li a4, 396
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_916
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_423
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_916: # %cond.load1585
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 398
+; CHECK-INDEXED-RV32-NEXT:    li a4, 397
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_917
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_424
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_917: # %cond.load1589
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 399
+; CHECK-INDEXED-RV32-NEXT:    li a4, 398
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_918
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_425
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_918: # %cond.load1593
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 400
+; CHECK-INDEXED-RV32-NEXT:    li a4, 399
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_919
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_426
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_919: # %cond.load1597
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 401
+; CHECK-INDEXED-RV32-NEXT:    li a4, 400
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_920
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_427
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_920: # %cond.load1601
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 402
+; CHECK-INDEXED-RV32-NEXT:    li a4, 401
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_921
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_428
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_921: # %cond.load1605
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 403
+; CHECK-INDEXED-RV32-NEXT:    li a4, 402
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_922
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_429
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_922: # %cond.load1609
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 404
+; CHECK-INDEXED-RV32-NEXT:    li a4, 403
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_923
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_430
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_923: # %cond.load1613
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 405
+; CHECK-INDEXED-RV32-NEXT:    li a4, 404
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_924
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_431
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_924: # %cond.load1617
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 406
+; CHECK-INDEXED-RV32-NEXT:    li a4, 405
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_925
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_432
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_925: # %cond.load1621
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 407
+; CHECK-INDEXED-RV32-NEXT:    li a4, 406
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_926
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_433
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_926: # %cond.load1625
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 408
+; CHECK-INDEXED-RV32-NEXT:    li a4, 407
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_927
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_434
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_927: # %cond.load1629
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 409
+; CHECK-INDEXED-RV32-NEXT:    li a4, 408
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_928
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_435
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_928: # %cond.load1633
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 410
+; CHECK-INDEXED-RV32-NEXT:    li a4, 409
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_929
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_436
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_929: # %cond.load1637
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 411
+; CHECK-INDEXED-RV32-NEXT:    li a4, 410
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_930
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_437
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_930: # %cond.load1641
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 412
+; CHECK-INDEXED-RV32-NEXT:    li a4, 411
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_931
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_438
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_931: # %cond.load1645
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 413
+; CHECK-INDEXED-RV32-NEXT:    li a4, 412
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1037
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_439
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1037: # %cond.load1645
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_440
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_932: # %cond.load1657
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 416
+; CHECK-INDEXED-RV32-NEXT:    li a4, 415
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_933
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_444
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_933: # %cond.load1661
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 417
+; CHECK-INDEXED-RV32-NEXT:    li a4, 416
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_934
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_445
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_934: # %cond.load1665
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 418
+; CHECK-INDEXED-RV32-NEXT:    li a4, 417
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_935
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_446
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_935: # %cond.load1669
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 419
+; CHECK-INDEXED-RV32-NEXT:    li a4, 418
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_936
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_447
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_936: # %cond.load1673
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 420
+; CHECK-INDEXED-RV32-NEXT:    li a4, 419
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_937
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_448
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_937: # %cond.load1677
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 421
+; CHECK-INDEXED-RV32-NEXT:    li a4, 420
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_938
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_449
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_938: # %cond.load1681
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 422
+; CHECK-INDEXED-RV32-NEXT:    li a4, 421
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_939
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_450
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_939: # %cond.load1685
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 423
+; CHECK-INDEXED-RV32-NEXT:    li a4, 422
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_940
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_451
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_940: # %cond.load1689
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 424
+; CHECK-INDEXED-RV32-NEXT:    li a4, 423
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_941
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_452
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_941: # %cond.load1693
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 425
+; CHECK-INDEXED-RV32-NEXT:    li a4, 424
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_942
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_453
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_942: # %cond.load1697
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 426
+; CHECK-INDEXED-RV32-NEXT:    li a4, 425
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_943
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_454
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_943: # %cond.load1701
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 427
+; CHECK-INDEXED-RV32-NEXT:    li a4, 426
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_944
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_455
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_944: # %cond.load1705
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 428
+; CHECK-INDEXED-RV32-NEXT:    li a4, 427
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_945
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_456
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_945: # %cond.load1709
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 429
+; CHECK-INDEXED-RV32-NEXT:    li a4, 428
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_946
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_457
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_946: # %cond.load1713
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 430
+; CHECK-INDEXED-RV32-NEXT:    li a4, 429
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_947
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_458
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_947: # %cond.load1717
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 431
+; CHECK-INDEXED-RV32-NEXT:    li a4, 430
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_948
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_459
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_948: # %cond.load1721
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 432
+; CHECK-INDEXED-RV32-NEXT:    li a4, 431
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_949
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_460
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_949: # %cond.load1725
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 433
+; CHECK-INDEXED-RV32-NEXT:    li a4, 432
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_950
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_461
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_950: # %cond.load1729
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 434
+; CHECK-INDEXED-RV32-NEXT:    li a4, 433
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_951
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_462
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_951: # %cond.load1733
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 435
+; CHECK-INDEXED-RV32-NEXT:    li a4, 434
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_952
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_463
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_952: # %cond.load1737
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 436
+; CHECK-INDEXED-RV32-NEXT:    li a4, 435
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_953
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_464
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_953: # %cond.load1741
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 437
+; CHECK-INDEXED-RV32-NEXT:    li a4, 436
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_954
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_465
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_954: # %cond.load1745
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 438
+; CHECK-INDEXED-RV32-NEXT:    li a4, 437
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_955
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_466
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_955: # %cond.load1749
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 439
+; CHECK-INDEXED-RV32-NEXT:    li a4, 438
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_956
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_467
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_956: # %cond.load1753
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 440
+; CHECK-INDEXED-RV32-NEXT:    li a4, 439
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_957
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_468
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_957: # %cond.load1757
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 441
+; CHECK-INDEXED-RV32-NEXT:    li a4, 440
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_958
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_469
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_958: # %cond.load1761
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 442
+; CHECK-INDEXED-RV32-NEXT:    li a4, 441
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_959
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_470
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_959: # %cond.load1765
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 443
+; CHECK-INDEXED-RV32-NEXT:    li a4, 442
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_960
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_471
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_960: # %cond.load1769
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 444
+; CHECK-INDEXED-RV32-NEXT:    li a4, 443
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_961
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_472
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_961: # %cond.load1773
+; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-INDEXED-RV32-NEXT:    li a3, 445
+; CHECK-INDEXED-RV32-NEXT:    li a4, 444
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1038
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_473
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1038: # %cond.load1773
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_474
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_962: # %cond.load1785
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 448
+; CHECK-INDEXED-RV32-NEXT:    li a4, 447
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_963
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_478
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_963: # %cond.load1789
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 449
+; CHECK-INDEXED-RV32-NEXT:    li a4, 448
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_964
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_479
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_964: # %cond.load1793
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 450
+; CHECK-INDEXED-RV32-NEXT:    li a4, 449
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_965
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_480
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_965: # %cond.load1797
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 451
+; CHECK-INDEXED-RV32-NEXT:    li a4, 450
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_966
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_481
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_966: # %cond.load1801
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 452
+; CHECK-INDEXED-RV32-NEXT:    li a4, 451
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_967
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_482
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_967: # %cond.load1805
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 453
+; CHECK-INDEXED-RV32-NEXT:    li a4, 452
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_968
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_483
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_968: # %cond.load1809
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 454
+; CHECK-INDEXED-RV32-NEXT:    li a4, 453
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_969
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_484
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_969: # %cond.load1813
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 455
+; CHECK-INDEXED-RV32-NEXT:    li a4, 454
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_970
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_485
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_970: # %cond.load1817
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 456
+; CHECK-INDEXED-RV32-NEXT:    li a4, 455
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_971
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_486
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_971: # %cond.load1821
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 457
+; CHECK-INDEXED-RV32-NEXT:    li a4, 456
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_972
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_487
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_972: # %cond.load1825
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 458
+; CHECK-INDEXED-RV32-NEXT:    li a4, 457
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_973
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_488
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_973: # %cond.load1829
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 459
+; CHECK-INDEXED-RV32-NEXT:    li a4, 458
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_974
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_489
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_974: # %cond.load1833
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 460
+; CHECK-INDEXED-RV32-NEXT:    li a4, 459
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_975
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_490
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_975: # %cond.load1837
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 461
+; CHECK-INDEXED-RV32-NEXT:    li a4, 460
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_976
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_491
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_976: # %cond.load1841
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 462
+; CHECK-INDEXED-RV32-NEXT:    li a4, 461
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_977
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_492
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_977: # %cond.load1845
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 463
+; CHECK-INDEXED-RV32-NEXT:    li a4, 462
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_978
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_493
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_978: # %cond.load1849
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 464
+; CHECK-INDEXED-RV32-NEXT:    li a4, 463
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_979
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_494
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_979: # %cond.load1853
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 465
+; CHECK-INDEXED-RV32-NEXT:    li a4, 464
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_980
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_495
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_980: # %cond.load1857
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 466
+; CHECK-INDEXED-RV32-NEXT:    li a4, 465
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_981
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_496
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_981: # %cond.load1861
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 467
+; CHECK-INDEXED-RV32-NEXT:    li a4, 466
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_982
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_497
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_982: # %cond.load1865
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 468
+; CHECK-INDEXED-RV32-NEXT:    li a4, 467
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_983
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_498
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_983: # %cond.load1869
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 469
+; CHECK-INDEXED-RV32-NEXT:    li a4, 468
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_984
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_499
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_984: # %cond.load1873
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 470
+; CHECK-INDEXED-RV32-NEXT:    li a4, 469
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_985
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_500
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_985: # %cond.load1877
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 471
+; CHECK-INDEXED-RV32-NEXT:    li a4, 470
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_986
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_501
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_986: # %cond.load1881
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 472
+; CHECK-INDEXED-RV32-NEXT:    li a4, 471
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_987
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_502
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_987: # %cond.load1885
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 473
+; CHECK-INDEXED-RV32-NEXT:    li a4, 472
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_988
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_503
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_988: # %cond.load1889
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 474
+; CHECK-INDEXED-RV32-NEXT:    li a4, 473
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_989
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_504
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_989: # %cond.load1893
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 475
+; CHECK-INDEXED-RV32-NEXT:    li a4, 474
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_990
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_505
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_990: # %cond.load1897
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 476
+; CHECK-INDEXED-RV32-NEXT:    li a4, 475
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_991
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_506
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_991: # %cond.load1901
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a4, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 477
+; CHECK-INDEXED-RV32-NEXT:    li a4, 476
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
+; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1039
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_507
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1039: # %cond.load1901
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_508
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 480
+; CHECK-INDEXED-RV32-NEXT:    li a3, 479
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_993
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_512
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 481
+; CHECK-INDEXED-RV32-NEXT:    li a3, 480
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_994
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_513
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 482
+; CHECK-INDEXED-RV32-NEXT:    li a3, 481
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_995
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_514
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 483
+; CHECK-INDEXED-RV32-NEXT:    li a3, 482
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_996
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_515
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 484
+; CHECK-INDEXED-RV32-NEXT:    li a3, 483
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_997
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_516
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 485
+; CHECK-INDEXED-RV32-NEXT:    li a3, 484
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_998
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_517
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 486
+; CHECK-INDEXED-RV32-NEXT:    li a3, 485
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_999
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_518
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 487
+; CHECK-INDEXED-RV32-NEXT:    li a3, 486
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1000
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_519
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 488
+; CHECK-INDEXED-RV32-NEXT:    li a3, 487
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1001
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_520
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 489
+; CHECK-INDEXED-RV32-NEXT:    li a3, 488
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1002
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_521
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 490
+; CHECK-INDEXED-RV32-NEXT:    li a3, 489
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1003
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_522
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 491
+; CHECK-INDEXED-RV32-NEXT:    li a3, 490
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1004
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_523
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 492
+; CHECK-INDEXED-RV32-NEXT:    li a3, 491
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1005
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_524
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 493
+; CHECK-INDEXED-RV32-NEXT:    li a3, 492
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1006
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_525
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 494
+; CHECK-INDEXED-RV32-NEXT:    li a3, 493
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1007
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_526
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 495
+; CHECK-INDEXED-RV32-NEXT:    li a3, 494
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1008
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_527
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 496
+; CHECK-INDEXED-RV32-NEXT:    li a3, 495
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1009
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_528
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 497
+; CHECK-INDEXED-RV32-NEXT:    li a3, 496
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1010
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_529
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 498
+; CHECK-INDEXED-RV32-NEXT:    li a3, 497
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1011
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_530
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 499
+; CHECK-INDEXED-RV32-NEXT:    li a3, 498
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1012
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_531
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 500
+; CHECK-INDEXED-RV32-NEXT:    li a3, 499
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1013
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_532
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 501
+; CHECK-INDEXED-RV32-NEXT:    li a3, 500
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1014
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_533
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 502
+; CHECK-INDEXED-RV32-NEXT:    li a3, 501
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1015
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_534
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 503
+; CHECK-INDEXED-RV32-NEXT:    li a3, 502
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1016
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_535
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 504
+; CHECK-INDEXED-RV32-NEXT:    li a3, 503
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1017
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_536
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 505
+; CHECK-INDEXED-RV32-NEXT:    li a3, 504
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1018
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_537
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 506
+; CHECK-INDEXED-RV32-NEXT:    li a3, 505
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1019
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_538
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 507
+; CHECK-INDEXED-RV32-NEXT:    li a3, 506
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1020
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_539
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 508
+; CHECK-INDEXED-RV32-NEXT:    li a3, 507
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1021
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_540
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 509
+; CHECK-INDEXED-RV32-NEXT:    li a3, 508
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1022
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_541
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 510
+; CHECK-INDEXED-RV32-NEXT:    li a3, 509
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1023
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_542
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a3, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV32-NEXT:    li a2, 511
+; CHECK-INDEXED-RV32-NEXT:    li a3, 510
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_1024
+; CHECK-INDEXED-RV32-NEXT:    j .LBB61_543
+; CHECK-INDEXED-RV32-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-INDEXED-RV32-NEXT:    lbu a0, 0(a0)
+; CHECK-INDEXED-RV32-NEXT:    li a1, 512
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a0
+; CHECK-INDEXED-RV32-NEXT:    li a0, 511
+; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; CHECK-RV64-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-RV64:       # %bb.0:
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v0
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_1
-; CHECK-RV64-NEXT:    j .LBB61_527
-; CHECK-RV64-NEXT:  .LBB61_1: # %else
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_2
-; CHECK-RV64-NEXT:    j .LBB61_528
-; CHECK-RV64-NEXT:  .LBB61_2: # %else2
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_3
-; CHECK-RV64-NEXT:    j .LBB61_529
-; CHECK-RV64-NEXT:  .LBB61_3: # %else6
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_4
-; CHECK-RV64-NEXT:    j .LBB61_530
-; CHECK-RV64-NEXT:  .LBB61_4: # %else10
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_5
-; CHECK-RV64-NEXT:    j .LBB61_531
-; CHECK-RV64-NEXT:  .LBB61_5: # %else14
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_6
-; CHECK-RV64-NEXT:    j .LBB61_532
-; CHECK-RV64-NEXT:  .LBB61_6: # %else18
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_7
-; CHECK-RV64-NEXT:    j .LBB61_533
-; CHECK-RV64-NEXT:  .LBB61_7: # %else22
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_8
-; CHECK-RV64-NEXT:    j .LBB61_534
-; CHECK-RV64-NEXT:  .LBB61_8: # %else26
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_9
-; CHECK-RV64-NEXT:    j .LBB61_535
-; CHECK-RV64-NEXT:  .LBB61_9: # %else30
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_10
-; CHECK-RV64-NEXT:    j .LBB61_536
-; CHECK-RV64-NEXT:  .LBB61_10: # %else34
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_11
-; CHECK-RV64-NEXT:    j .LBB61_537
-; CHECK-RV64-NEXT:  .LBB61_11: # %else38
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_12
-; CHECK-RV64-NEXT:    j .LBB61_538
-; CHECK-RV64-NEXT:  .LBB61_12: # %else42
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_13
-; CHECK-RV64-NEXT:    j .LBB61_539
-; CHECK-RV64-NEXT:  .LBB61_13: # %else46
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_14
-; CHECK-RV64-NEXT:    j .LBB61_540
-; CHECK-RV64-NEXT:  .LBB61_14: # %else50
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_15
-; CHECK-RV64-NEXT:    j .LBB61_541
-; CHECK-RV64-NEXT:  .LBB61_15: # %else54
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_16
-; CHECK-RV64-NEXT:    j .LBB61_542
-; CHECK-RV64-NEXT:  .LBB61_16: # %else58
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_17
-; CHECK-RV64-NEXT:    j .LBB61_543
-; CHECK-RV64-NEXT:  .LBB61_17: # %else62
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_18
-; CHECK-RV64-NEXT:    j .LBB61_544
-; CHECK-RV64-NEXT:  .LBB61_18: # %else66
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_19
-; CHECK-RV64-NEXT:    j .LBB61_545
-; CHECK-RV64-NEXT:  .LBB61_19: # %else70
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_20
-; CHECK-RV64-NEXT:    j .LBB61_546
-; CHECK-RV64-NEXT:  .LBB61_20: # %else74
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_21
-; CHECK-RV64-NEXT:    j .LBB61_547
-; CHECK-RV64-NEXT:  .LBB61_21: # %else78
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_22
-; CHECK-RV64-NEXT:    j .LBB61_548
-; CHECK-RV64-NEXT:  .LBB61_22: # %else82
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_23
-; CHECK-RV64-NEXT:    j .LBB61_549
-; CHECK-RV64-NEXT:  .LBB61_23: # %else86
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_24
-; CHECK-RV64-NEXT:    j .LBB61_550
-; CHECK-RV64-NEXT:  .LBB61_24: # %else90
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_25
-; CHECK-RV64-NEXT:    j .LBB61_551
-; CHECK-RV64-NEXT:  .LBB61_25: # %else94
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_26
-; CHECK-RV64-NEXT:    j .LBB61_552
-; CHECK-RV64-NEXT:  .LBB61_26: # %else98
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_27
-; CHECK-RV64-NEXT:    j .LBB61_553
-; CHECK-RV64-NEXT:  .LBB61_27: # %else102
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_28
-; CHECK-RV64-NEXT:    j .LBB61_554
-; CHECK-RV64-NEXT:  .LBB61_28: # %else106
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_29
-; CHECK-RV64-NEXT:    j .LBB61_555
-; CHECK-RV64-NEXT:  .LBB61_29: # %else110
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_30
-; CHECK-RV64-NEXT:    j .LBB61_556
-; CHECK-RV64-NEXT:  .LBB61_30: # %else114
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_31
-; CHECK-RV64-NEXT:    j .LBB61_557
-; CHECK-RV64-NEXT:  .LBB61_31: # %else118
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_32
-; CHECK-RV64-NEXT:    j .LBB61_558
-; CHECK-RV64-NEXT:  .LBB61_32: # %else122
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_33
-; CHECK-RV64-NEXT:    j .LBB61_559
-; CHECK-RV64-NEXT:  .LBB61_33: # %else126
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_34
-; CHECK-RV64-NEXT:    j .LBB61_560
-; CHECK-RV64-NEXT:  .LBB61_34: # %else130
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_35
-; CHECK-RV64-NEXT:    j .LBB61_561
-; CHECK-RV64-NEXT:  .LBB61_35: # %else134
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_36
-; CHECK-RV64-NEXT:    j .LBB61_562
-; CHECK-RV64-NEXT:  .LBB61_36: # %else138
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_37
-; CHECK-RV64-NEXT:    j .LBB61_563
-; CHECK-RV64-NEXT:  .LBB61_37: # %else142
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_38
-; CHECK-RV64-NEXT:    j .LBB61_564
-; CHECK-RV64-NEXT:  .LBB61_38: # %else146
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_39
-; CHECK-RV64-NEXT:    j .LBB61_565
-; CHECK-RV64-NEXT:  .LBB61_39: # %else150
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_40
-; CHECK-RV64-NEXT:    j .LBB61_566
-; CHECK-RV64-NEXT:  .LBB61_40: # %else154
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_41
-; CHECK-RV64-NEXT:    j .LBB61_567
-; CHECK-RV64-NEXT:  .LBB61_41: # %else158
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_42
-; CHECK-RV64-NEXT:    j .LBB61_568
-; CHECK-RV64-NEXT:  .LBB61_42: # %else162
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_43
-; CHECK-RV64-NEXT:    j .LBB61_569
-; CHECK-RV64-NEXT:  .LBB61_43: # %else166
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_44
-; CHECK-RV64-NEXT:    j .LBB61_570
-; CHECK-RV64-NEXT:  .LBB61_44: # %else170
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_45
-; CHECK-RV64-NEXT:    j .LBB61_571
-; CHECK-RV64-NEXT:  .LBB61_45: # %else174
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_46
-; CHECK-RV64-NEXT:    j .LBB61_572
-; CHECK-RV64-NEXT:  .LBB61_46: # %else178
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_47
-; CHECK-RV64-NEXT:    j .LBB61_573
-; CHECK-RV64-NEXT:  .LBB61_47: # %else182
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_48
-; CHECK-RV64-NEXT:    j .LBB61_574
-; CHECK-RV64-NEXT:  .LBB61_48: # %else186
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_49
-; CHECK-RV64-NEXT:    j .LBB61_575
-; CHECK-RV64-NEXT:  .LBB61_49: # %else190
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_50
-; CHECK-RV64-NEXT:    j .LBB61_576
-; CHECK-RV64-NEXT:  .LBB61_50: # %else194
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_51
-; CHECK-RV64-NEXT:    j .LBB61_577
-; CHECK-RV64-NEXT:  .LBB61_51: # %else198
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_52
-; CHECK-RV64-NEXT:    j .LBB61_578
-; CHECK-RV64-NEXT:  .LBB61_52: # %else202
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_53
-; CHECK-RV64-NEXT:    j .LBB61_579
-; CHECK-RV64-NEXT:  .LBB61_53: # %else206
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_54
-; CHECK-RV64-NEXT:    j .LBB61_580
-; CHECK-RV64-NEXT:  .LBB61_54: # %else210
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_55
-; CHECK-RV64-NEXT:    j .LBB61_581
-; CHECK-RV64-NEXT:  .LBB61_55: # %else214
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_56
-; CHECK-RV64-NEXT:    j .LBB61_582
-; CHECK-RV64-NEXT:  .LBB61_56: # %else218
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_57
-; CHECK-RV64-NEXT:    j .LBB61_583
-; CHECK-RV64-NEXT:  .LBB61_57: # %else222
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_58
-; CHECK-RV64-NEXT:    j .LBB61_584
-; CHECK-RV64-NEXT:  .LBB61_58: # %else226
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_59
-; CHECK-RV64-NEXT:    j .LBB61_585
-; CHECK-RV64-NEXT:  .LBB61_59: # %else230
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_60
-; CHECK-RV64-NEXT:    j .LBB61_586
-; CHECK-RV64-NEXT:  .LBB61_60: # %else234
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_61
-; CHECK-RV64-NEXT:    j .LBB61_587
-; CHECK-RV64-NEXT:  .LBB61_61: # %else238
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_63
-; CHECK-RV64-NEXT:  .LBB61_62: # %cond.load241
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 62
-; CHECK-RV64-NEXT:    li a3, 61
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:  .LBB61_63: # %else242
-; CHECK-RV64-NEXT:    slli a1, a2, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_65
-; CHECK-RV64-NEXT:  # %bb.64: # %cond.load245
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v17, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 63
-; CHECK-RV64-NEXT:    li a3, 62
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v17, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_65: # %else246
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_66
-; CHECK-RV64-NEXT:    j .LBB61_588
-; CHECK-RV64-NEXT:  .LBB61_66: # %else250
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_67
-; CHECK-RV64-NEXT:    j .LBB61_589
-; CHECK-RV64-NEXT:  .LBB61_67: # %else254
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_68
-; CHECK-RV64-NEXT:    j .LBB61_590
-; CHECK-RV64-NEXT:  .LBB61_68: # %else258
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_69
-; CHECK-RV64-NEXT:    j .LBB61_591
-; CHECK-RV64-NEXT:  .LBB61_69: # %else262
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_70
-; CHECK-RV64-NEXT:    j .LBB61_592
-; CHECK-RV64-NEXT:  .LBB61_70: # %else266
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_71
-; CHECK-RV64-NEXT:    j .LBB61_593
-; CHECK-RV64-NEXT:  .LBB61_71: # %else270
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_72
-; CHECK-RV64-NEXT:    j .LBB61_594
-; CHECK-RV64-NEXT:  .LBB61_72: # %else274
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_73
-; CHECK-RV64-NEXT:    j .LBB61_595
-; CHECK-RV64-NEXT:  .LBB61_73: # %else278
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_74
-; CHECK-RV64-NEXT:    j .LBB61_596
-; CHECK-RV64-NEXT:  .LBB61_74: # %else282
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_75
-; CHECK-RV64-NEXT:    j .LBB61_597
-; CHECK-RV64-NEXT:  .LBB61_75: # %else286
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_76
-; CHECK-RV64-NEXT:    j .LBB61_598
-; CHECK-RV64-NEXT:  .LBB61_76: # %else290
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_77
-; CHECK-RV64-NEXT:    j .LBB61_599
-; CHECK-RV64-NEXT:  .LBB61_77: # %else294
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_78
-; CHECK-RV64-NEXT:    j .LBB61_600
-; CHECK-RV64-NEXT:  .LBB61_78: # %else298
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_79
-; CHECK-RV64-NEXT:    j .LBB61_601
-; CHECK-RV64-NEXT:  .LBB61_79: # %else302
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_80
-; CHECK-RV64-NEXT:    j .LBB61_602
-; CHECK-RV64-NEXT:  .LBB61_80: # %else306
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_81
-; CHECK-RV64-NEXT:    j .LBB61_603
-; CHECK-RV64-NEXT:  .LBB61_81: # %else310
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_82
-; CHECK-RV64-NEXT:    j .LBB61_604
-; CHECK-RV64-NEXT:  .LBB61_82: # %else314
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_83
-; CHECK-RV64-NEXT:    j .LBB61_605
-; CHECK-RV64-NEXT:  .LBB61_83: # %else318
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_84
-; CHECK-RV64-NEXT:    j .LBB61_606
-; CHECK-RV64-NEXT:  .LBB61_84: # %else322
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_85
-; CHECK-RV64-NEXT:    j .LBB61_607
-; CHECK-RV64-NEXT:  .LBB61_85: # %else326
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_86
-; CHECK-RV64-NEXT:    j .LBB61_608
-; CHECK-RV64-NEXT:  .LBB61_86: # %else330
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_87
-; CHECK-RV64-NEXT:    j .LBB61_609
-; CHECK-RV64-NEXT:  .LBB61_87: # %else334
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_88
-; CHECK-RV64-NEXT:    j .LBB61_610
-; CHECK-RV64-NEXT:  .LBB61_88: # %else338
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_89
-; CHECK-RV64-NEXT:    j .LBB61_611
-; CHECK-RV64-NEXT:  .LBB61_89: # %else342
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_90
-; CHECK-RV64-NEXT:    j .LBB61_612
-; CHECK-RV64-NEXT:  .LBB61_90: # %else346
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_91
-; CHECK-RV64-NEXT:    j .LBB61_613
-; CHECK-RV64-NEXT:  .LBB61_91: # %else350
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_92
-; CHECK-RV64-NEXT:    j .LBB61_614
-; CHECK-RV64-NEXT:  .LBB61_92: # %else354
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_93
-; CHECK-RV64-NEXT:    j .LBB61_615
-; CHECK-RV64-NEXT:  .LBB61_93: # %else358
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_94
-; CHECK-RV64-NEXT:    j .LBB61_616
-; CHECK-RV64-NEXT:  .LBB61_94: # %else362
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_95
-; CHECK-RV64-NEXT:    j .LBB61_617
-; CHECK-RV64-NEXT:  .LBB61_95: # %else366
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_96
-; CHECK-RV64-NEXT:    j .LBB61_618
-; CHECK-RV64-NEXT:  .LBB61_96: # %else370
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_97
-; CHECK-RV64-NEXT:    j .LBB61_619
-; CHECK-RV64-NEXT:  .LBB61_97: # %else374
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_98
-; CHECK-RV64-NEXT:    j .LBB61_620
-; CHECK-RV64-NEXT:  .LBB61_98: # %else378
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_99
-; CHECK-RV64-NEXT:    j .LBB61_621
-; CHECK-RV64-NEXT:  .LBB61_99: # %else382
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_100
-; CHECK-RV64-NEXT:    j .LBB61_622
-; CHECK-RV64-NEXT:  .LBB61_100: # %else386
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_101
-; CHECK-RV64-NEXT:    j .LBB61_623
-; CHECK-RV64-NEXT:  .LBB61_101: # %else390
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_102
-; CHECK-RV64-NEXT:    j .LBB61_624
-; CHECK-RV64-NEXT:  .LBB61_102: # %else394
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_103
-; CHECK-RV64-NEXT:    j .LBB61_625
-; CHECK-RV64-NEXT:  .LBB61_103: # %else398
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_104
-; CHECK-RV64-NEXT:    j .LBB61_626
-; CHECK-RV64-NEXT:  .LBB61_104: # %else402
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_105
-; CHECK-RV64-NEXT:    j .LBB61_627
-; CHECK-RV64-NEXT:  .LBB61_105: # %else406
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_106
-; CHECK-RV64-NEXT:    j .LBB61_628
-; CHECK-RV64-NEXT:  .LBB61_106: # %else410
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_107
-; CHECK-RV64-NEXT:    j .LBB61_629
-; CHECK-RV64-NEXT:  .LBB61_107: # %else414
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_108
-; CHECK-RV64-NEXT:    j .LBB61_630
-; CHECK-RV64-NEXT:  .LBB61_108: # %else418
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_109
-; CHECK-RV64-NEXT:    j .LBB61_631
-; CHECK-RV64-NEXT:  .LBB61_109: # %else422
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_110
-; CHECK-RV64-NEXT:    j .LBB61_632
-; CHECK-RV64-NEXT:  .LBB61_110: # %else426
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_111
-; CHECK-RV64-NEXT:    j .LBB61_633
-; CHECK-RV64-NEXT:  .LBB61_111: # %else430
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_112
-; CHECK-RV64-NEXT:    j .LBB61_634
-; CHECK-RV64-NEXT:  .LBB61_112: # %else434
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_113
-; CHECK-RV64-NEXT:    j .LBB61_635
-; CHECK-RV64-NEXT:  .LBB61_113: # %else438
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_114
-; CHECK-RV64-NEXT:    j .LBB61_636
-; CHECK-RV64-NEXT:  .LBB61_114: # %else442
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_115
-; CHECK-RV64-NEXT:    j .LBB61_637
-; CHECK-RV64-NEXT:  .LBB61_115: # %else446
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_116
-; CHECK-RV64-NEXT:    j .LBB61_638
-; CHECK-RV64-NEXT:  .LBB61_116: # %else450
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_117
-; CHECK-RV64-NEXT:    j .LBB61_639
-; CHECK-RV64-NEXT:  .LBB61_117: # %else454
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_118
-; CHECK-RV64-NEXT:    j .LBB61_640
-; CHECK-RV64-NEXT:  .LBB61_118: # %else458
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_119
-; CHECK-RV64-NEXT:    j .LBB61_641
-; CHECK-RV64-NEXT:  .LBB61_119: # %else462
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_120
-; CHECK-RV64-NEXT:    j .LBB61_642
-; CHECK-RV64-NEXT:  .LBB61_120: # %else466
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_121
-; CHECK-RV64-NEXT:    j .LBB61_643
-; CHECK-RV64-NEXT:  .LBB61_121: # %else470
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_122
-; CHECK-RV64-NEXT:    j .LBB61_644
-; CHECK-RV64-NEXT:  .LBB61_122: # %else474
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_123
-; CHECK-RV64-NEXT:    j .LBB61_645
-; CHECK-RV64-NEXT:  .LBB61_123: # %else478
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_124
-; CHECK-RV64-NEXT:    j .LBB61_646
-; CHECK-RV64-NEXT:  .LBB61_124: # %else482
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_125
-; CHECK-RV64-NEXT:    j .LBB61_647
-; CHECK-RV64-NEXT:  .LBB61_125: # %else486
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_126
-; CHECK-RV64-NEXT:    j .LBB61_648
-; CHECK-RV64-NEXT:  .LBB61_126: # %else490
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_127
-; CHECK-RV64-NEXT:    j .LBB61_649
-; CHECK-RV64-NEXT:  .LBB61_127: # %else494
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_129
-; CHECK-RV64-NEXT:  .LBB61_128: # %cond.load497
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 126
-; CHECK-RV64-NEXT:    li a3, 125
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:  .LBB61_129: # %else498
-; CHECK-RV64-NEXT:    slli a2, a1, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_131
-; CHECK-RV64-NEXT:  # %bb.130: # %cond.load501
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v18, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 127
-; CHECK-RV64-NEXT:    li a3, 126
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v18, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_131: # %else502
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_132
-; CHECK-RV64-NEXT:    j .LBB61_650
-; CHECK-RV64-NEXT:  .LBB61_132: # %else506
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_133
-; CHECK-RV64-NEXT:    j .LBB61_651
-; CHECK-RV64-NEXT:  .LBB61_133: # %else510
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_134
-; CHECK-RV64-NEXT:    j .LBB61_652
-; CHECK-RV64-NEXT:  .LBB61_134: # %else514
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_135
-; CHECK-RV64-NEXT:    j .LBB61_653
-; CHECK-RV64-NEXT:  .LBB61_135: # %else518
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_136
-; CHECK-RV64-NEXT:    j .LBB61_654
-; CHECK-RV64-NEXT:  .LBB61_136: # %else522
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_137
-; CHECK-RV64-NEXT:    j .LBB61_655
-; CHECK-RV64-NEXT:  .LBB61_137: # %else526
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_138
-; CHECK-RV64-NEXT:    j .LBB61_656
-; CHECK-RV64-NEXT:  .LBB61_138: # %else530
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_139
-; CHECK-RV64-NEXT:    j .LBB61_657
-; CHECK-RV64-NEXT:  .LBB61_139: # %else534
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_140
-; CHECK-RV64-NEXT:    j .LBB61_658
-; CHECK-RV64-NEXT:  .LBB61_140: # %else538
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_141
-; CHECK-RV64-NEXT:    j .LBB61_659
-; CHECK-RV64-NEXT:  .LBB61_141: # %else542
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_142
-; CHECK-RV64-NEXT:    j .LBB61_660
-; CHECK-RV64-NEXT:  .LBB61_142: # %else546
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_143
-; CHECK-RV64-NEXT:    j .LBB61_661
-; CHECK-RV64-NEXT:  .LBB61_143: # %else550
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_144
-; CHECK-RV64-NEXT:    j .LBB61_662
-; CHECK-RV64-NEXT:  .LBB61_144: # %else554
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_145
-; CHECK-RV64-NEXT:    j .LBB61_663
-; CHECK-RV64-NEXT:  .LBB61_145: # %else558
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_146
-; CHECK-RV64-NEXT:    j .LBB61_664
-; CHECK-RV64-NEXT:  .LBB61_146: # %else562
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_147
-; CHECK-RV64-NEXT:    j .LBB61_665
-; CHECK-RV64-NEXT:  .LBB61_147: # %else566
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_148
-; CHECK-RV64-NEXT:    j .LBB61_666
-; CHECK-RV64-NEXT:  .LBB61_148: # %else570
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_149
-; CHECK-RV64-NEXT:    j .LBB61_667
-; CHECK-RV64-NEXT:  .LBB61_149: # %else574
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_150
-; CHECK-RV64-NEXT:    j .LBB61_668
-; CHECK-RV64-NEXT:  .LBB61_150: # %else578
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_151
-; CHECK-RV64-NEXT:    j .LBB61_669
-; CHECK-RV64-NEXT:  .LBB61_151: # %else582
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_152
-; CHECK-RV64-NEXT:    j .LBB61_670
-; CHECK-RV64-NEXT:  .LBB61_152: # %else586
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_153
-; CHECK-RV64-NEXT:    j .LBB61_671
-; CHECK-RV64-NEXT:  .LBB61_153: # %else590
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_154
-; CHECK-RV64-NEXT:    j .LBB61_672
-; CHECK-RV64-NEXT:  .LBB61_154: # %else594
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_155
-; CHECK-RV64-NEXT:    j .LBB61_673
-; CHECK-RV64-NEXT:  .LBB61_155: # %else598
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_156
-; CHECK-RV64-NEXT:    j .LBB61_674
-; CHECK-RV64-NEXT:  .LBB61_156: # %else602
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_157
-; CHECK-RV64-NEXT:    j .LBB61_675
-; CHECK-RV64-NEXT:  .LBB61_157: # %else606
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_158
-; CHECK-RV64-NEXT:    j .LBB61_676
-; CHECK-RV64-NEXT:  .LBB61_158: # %else610
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_159
-; CHECK-RV64-NEXT:    j .LBB61_677
-; CHECK-RV64-NEXT:  .LBB61_159: # %else614
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_160
-; CHECK-RV64-NEXT:    j .LBB61_678
-; CHECK-RV64-NEXT:  .LBB61_160: # %else618
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_161
-; CHECK-RV64-NEXT:    j .LBB61_679
-; CHECK-RV64-NEXT:  .LBB61_161: # %else622
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_162
-; CHECK-RV64-NEXT:    j .LBB61_680
-; CHECK-RV64-NEXT:  .LBB61_162: # %else626
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_163
-; CHECK-RV64-NEXT:    j .LBB61_681
-; CHECK-RV64-NEXT:  .LBB61_163: # %else630
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_164
-; CHECK-RV64-NEXT:    j .LBB61_682
-; CHECK-RV64-NEXT:  .LBB61_164: # %else634
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_165
-; CHECK-RV64-NEXT:    j .LBB61_683
-; CHECK-RV64-NEXT:  .LBB61_165: # %else638
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_166
-; CHECK-RV64-NEXT:    j .LBB61_684
-; CHECK-RV64-NEXT:  .LBB61_166: # %else642
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_167
-; CHECK-RV64-NEXT:    j .LBB61_685
-; CHECK-RV64-NEXT:  .LBB61_167: # %else646
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_168
-; CHECK-RV64-NEXT:    j .LBB61_686
-; CHECK-RV64-NEXT:  .LBB61_168: # %else650
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_169
-; CHECK-RV64-NEXT:    j .LBB61_687
-; CHECK-RV64-NEXT:  .LBB61_169: # %else654
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_170
-; CHECK-RV64-NEXT:    j .LBB61_688
-; CHECK-RV64-NEXT:  .LBB61_170: # %else658
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_171
-; CHECK-RV64-NEXT:    j .LBB61_689
-; CHECK-RV64-NEXT:  .LBB61_171: # %else662
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_172
-; CHECK-RV64-NEXT:    j .LBB61_690
-; CHECK-RV64-NEXT:  .LBB61_172: # %else666
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_173
-; CHECK-RV64-NEXT:    j .LBB61_691
-; CHECK-RV64-NEXT:  .LBB61_173: # %else670
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_174
-; CHECK-RV64-NEXT:    j .LBB61_692
-; CHECK-RV64-NEXT:  .LBB61_174: # %else674
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_175
-; CHECK-RV64-NEXT:    j .LBB61_693
-; CHECK-RV64-NEXT:  .LBB61_175: # %else678
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_176
-; CHECK-RV64-NEXT:    j .LBB61_694
-; CHECK-RV64-NEXT:  .LBB61_176: # %else682
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_177
-; CHECK-RV64-NEXT:    j .LBB61_695
-; CHECK-RV64-NEXT:  .LBB61_177: # %else686
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_178
-; CHECK-RV64-NEXT:    j .LBB61_696
-; CHECK-RV64-NEXT:  .LBB61_178: # %else690
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_179
-; CHECK-RV64-NEXT:    j .LBB61_697
-; CHECK-RV64-NEXT:  .LBB61_179: # %else694
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_180
-; CHECK-RV64-NEXT:    j .LBB61_698
-; CHECK-RV64-NEXT:  .LBB61_180: # %else698
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_181
-; CHECK-RV64-NEXT:    j .LBB61_699
-; CHECK-RV64-NEXT:  .LBB61_181: # %else702
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_182
-; CHECK-RV64-NEXT:    j .LBB61_700
-; CHECK-RV64-NEXT:  .LBB61_182: # %else706
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_183
-; CHECK-RV64-NEXT:    j .LBB61_701
-; CHECK-RV64-NEXT:  .LBB61_183: # %else710
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_184
-; CHECK-RV64-NEXT:    j .LBB61_702
-; CHECK-RV64-NEXT:  .LBB61_184: # %else714
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_185
-; CHECK-RV64-NEXT:    j .LBB61_703
-; CHECK-RV64-NEXT:  .LBB61_185: # %else718
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_186
-; CHECK-RV64-NEXT:    j .LBB61_704
-; CHECK-RV64-NEXT:  .LBB61_186: # %else722
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_187
-; CHECK-RV64-NEXT:    j .LBB61_705
-; CHECK-RV64-NEXT:  .LBB61_187: # %else726
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_188
-; CHECK-RV64-NEXT:    j .LBB61_706
-; CHECK-RV64-NEXT:  .LBB61_188: # %else730
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_189
-; CHECK-RV64-NEXT:    j .LBB61_707
-; CHECK-RV64-NEXT:  .LBB61_189: # %else734
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_190
-; CHECK-RV64-NEXT:    j .LBB61_708
-; CHECK-RV64-NEXT:  .LBB61_190: # %else738
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_191
-; CHECK-RV64-NEXT:    j .LBB61_709
-; CHECK-RV64-NEXT:  .LBB61_191: # %else742
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_192
-; CHECK-RV64-NEXT:    j .LBB61_710
-; CHECK-RV64-NEXT:  .LBB61_192: # %else746
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_193
-; CHECK-RV64-NEXT:    j .LBB61_711
-; CHECK-RV64-NEXT:  .LBB61_193: # %else750
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_195
-; CHECK-RV64-NEXT:  .LBB61_194: # %cond.load753
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 190
-; CHECK-RV64-NEXT:    li a3, 189
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_195: # %else754
-; CHECK-RV64-NEXT:    slli a1, a2, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_197
-; CHECK-RV64-NEXT:  # %bb.196: # %cond.load757
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v20, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 191
-; CHECK-RV64-NEXT:    li a3, 190
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_197: # %else758
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_198
-; CHECK-RV64-NEXT:    j .LBB61_712
-; CHECK-RV64-NEXT:  .LBB61_198: # %else762
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_199
-; CHECK-RV64-NEXT:    j .LBB61_713
-; CHECK-RV64-NEXT:  .LBB61_199: # %else766
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_200
-; CHECK-RV64-NEXT:    j .LBB61_714
-; CHECK-RV64-NEXT:  .LBB61_200: # %else770
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_201
-; CHECK-RV64-NEXT:    j .LBB61_715
-; CHECK-RV64-NEXT:  .LBB61_201: # %else774
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_202
-; CHECK-RV64-NEXT:    j .LBB61_716
-; CHECK-RV64-NEXT:  .LBB61_202: # %else778
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_203
-; CHECK-RV64-NEXT:    j .LBB61_717
-; CHECK-RV64-NEXT:  .LBB61_203: # %else782
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_204
-; CHECK-RV64-NEXT:    j .LBB61_718
-; CHECK-RV64-NEXT:  .LBB61_204: # %else786
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_205
-; CHECK-RV64-NEXT:    j .LBB61_719
-; CHECK-RV64-NEXT:  .LBB61_205: # %else790
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_206
-; CHECK-RV64-NEXT:    j .LBB61_720
-; CHECK-RV64-NEXT:  .LBB61_206: # %else794
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_207
-; CHECK-RV64-NEXT:    j .LBB61_721
-; CHECK-RV64-NEXT:  .LBB61_207: # %else798
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_208
-; CHECK-RV64-NEXT:    j .LBB61_722
-; CHECK-RV64-NEXT:  .LBB61_208: # %else802
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_209
-; CHECK-RV64-NEXT:    j .LBB61_723
-; CHECK-RV64-NEXT:  .LBB61_209: # %else806
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_210
-; CHECK-RV64-NEXT:    j .LBB61_724
-; CHECK-RV64-NEXT:  .LBB61_210: # %else810
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_211
-; CHECK-RV64-NEXT:    j .LBB61_725
-; CHECK-RV64-NEXT:  .LBB61_211: # %else814
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_212
-; CHECK-RV64-NEXT:    j .LBB61_726
-; CHECK-RV64-NEXT:  .LBB61_212: # %else818
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_213
-; CHECK-RV64-NEXT:    j .LBB61_727
-; CHECK-RV64-NEXT:  .LBB61_213: # %else822
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_214
-; CHECK-RV64-NEXT:    j .LBB61_728
-; CHECK-RV64-NEXT:  .LBB61_214: # %else826
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_215
-; CHECK-RV64-NEXT:    j .LBB61_729
-; CHECK-RV64-NEXT:  .LBB61_215: # %else830
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_216
-; CHECK-RV64-NEXT:    j .LBB61_730
-; CHECK-RV64-NEXT:  .LBB61_216: # %else834
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_217
-; CHECK-RV64-NEXT:    j .LBB61_731
-; CHECK-RV64-NEXT:  .LBB61_217: # %else838
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_218
-; CHECK-RV64-NEXT:    j .LBB61_732
-; CHECK-RV64-NEXT:  .LBB61_218: # %else842
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_219
-; CHECK-RV64-NEXT:    j .LBB61_733
-; CHECK-RV64-NEXT:  .LBB61_219: # %else846
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_220
-; CHECK-RV64-NEXT:    j .LBB61_734
-; CHECK-RV64-NEXT:  .LBB61_220: # %else850
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_221
-; CHECK-RV64-NEXT:    j .LBB61_735
-; CHECK-RV64-NEXT:  .LBB61_221: # %else854
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_222
-; CHECK-RV64-NEXT:    j .LBB61_736
-; CHECK-RV64-NEXT:  .LBB61_222: # %else858
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_223
-; CHECK-RV64-NEXT:    j .LBB61_737
-; CHECK-RV64-NEXT:  .LBB61_223: # %else862
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_224
-; CHECK-RV64-NEXT:    j .LBB61_738
-; CHECK-RV64-NEXT:  .LBB61_224: # %else866
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_225
-; CHECK-RV64-NEXT:    j .LBB61_739
-; CHECK-RV64-NEXT:  .LBB61_225: # %else870
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_226
-; CHECK-RV64-NEXT:    j .LBB61_740
-; CHECK-RV64-NEXT:  .LBB61_226: # %else874
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_227
-; CHECK-RV64-NEXT:    j .LBB61_741
-; CHECK-RV64-NEXT:  .LBB61_227: # %else878
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_228
-; CHECK-RV64-NEXT:    j .LBB61_742
-; CHECK-RV64-NEXT:  .LBB61_228: # %else882
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_229
-; CHECK-RV64-NEXT:    j .LBB61_743
-; CHECK-RV64-NEXT:  .LBB61_229: # %else886
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_230
-; CHECK-RV64-NEXT:    j .LBB61_744
-; CHECK-RV64-NEXT:  .LBB61_230: # %else890
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_231
-; CHECK-RV64-NEXT:    j .LBB61_745
-; CHECK-RV64-NEXT:  .LBB61_231: # %else894
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_232
-; CHECK-RV64-NEXT:    j .LBB61_746
-; CHECK-RV64-NEXT:  .LBB61_232: # %else898
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_233
-; CHECK-RV64-NEXT:    j .LBB61_747
-; CHECK-RV64-NEXT:  .LBB61_233: # %else902
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_234
-; CHECK-RV64-NEXT:    j .LBB61_748
-; CHECK-RV64-NEXT:  .LBB61_234: # %else906
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_235
-; CHECK-RV64-NEXT:    j .LBB61_749
-; CHECK-RV64-NEXT:  .LBB61_235: # %else910
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_236
-; CHECK-RV64-NEXT:    j .LBB61_750
-; CHECK-RV64-NEXT:  .LBB61_236: # %else914
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_237
-; CHECK-RV64-NEXT:    j .LBB61_751
-; CHECK-RV64-NEXT:  .LBB61_237: # %else918
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_238
-; CHECK-RV64-NEXT:    j .LBB61_752
-; CHECK-RV64-NEXT:  .LBB61_238: # %else922
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_239
-; CHECK-RV64-NEXT:    j .LBB61_753
-; CHECK-RV64-NEXT:  .LBB61_239: # %else926
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_240
-; CHECK-RV64-NEXT:    j .LBB61_754
-; CHECK-RV64-NEXT:  .LBB61_240: # %else930
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_241
-; CHECK-RV64-NEXT:    j .LBB61_755
-; CHECK-RV64-NEXT:  .LBB61_241: # %else934
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_242
-; CHECK-RV64-NEXT:    j .LBB61_756
-; CHECK-RV64-NEXT:  .LBB61_242: # %else938
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_243
-; CHECK-RV64-NEXT:    j .LBB61_757
-; CHECK-RV64-NEXT:  .LBB61_243: # %else942
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_244
-; CHECK-RV64-NEXT:    j .LBB61_758
-; CHECK-RV64-NEXT:  .LBB61_244: # %else946
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_245
-; CHECK-RV64-NEXT:    j .LBB61_759
-; CHECK-RV64-NEXT:  .LBB61_245: # %else950
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_246
-; CHECK-RV64-NEXT:    j .LBB61_760
-; CHECK-RV64-NEXT:  .LBB61_246: # %else954
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_247
-; CHECK-RV64-NEXT:    j .LBB61_761
-; CHECK-RV64-NEXT:  .LBB61_247: # %else958
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_248
-; CHECK-RV64-NEXT:    j .LBB61_762
-; CHECK-RV64-NEXT:  .LBB61_248: # %else962
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_249
-; CHECK-RV64-NEXT:    j .LBB61_763
-; CHECK-RV64-NEXT:  .LBB61_249: # %else966
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_250
-; CHECK-RV64-NEXT:    j .LBB61_764
-; CHECK-RV64-NEXT:  .LBB61_250: # %else970
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_251
-; CHECK-RV64-NEXT:    j .LBB61_765
-; CHECK-RV64-NEXT:  .LBB61_251: # %else974
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_252
-; CHECK-RV64-NEXT:    j .LBB61_766
-; CHECK-RV64-NEXT:  .LBB61_252: # %else978
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_253
-; CHECK-RV64-NEXT:    j .LBB61_767
-; CHECK-RV64-NEXT:  .LBB61_253: # %else982
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_254
-; CHECK-RV64-NEXT:    j .LBB61_768
-; CHECK-RV64-NEXT:  .LBB61_254: # %else986
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_255
-; CHECK-RV64-NEXT:    j .LBB61_769
-; CHECK-RV64-NEXT:  .LBB61_255: # %else990
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_256
-; CHECK-RV64-NEXT:    j .LBB61_770
-; CHECK-RV64-NEXT:  .LBB61_256: # %else994
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_257
-; CHECK-RV64-NEXT:    j .LBB61_771
-; CHECK-RV64-NEXT:  .LBB61_257: # %else998
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_258
-; CHECK-RV64-NEXT:    j .LBB61_772
-; CHECK-RV64-NEXT:  .LBB61_258: # %else1002
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_259
-; CHECK-RV64-NEXT:    j .LBB61_773
-; CHECK-RV64-NEXT:  .LBB61_259: # %else1006
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_261
-; CHECK-RV64-NEXT:  .LBB61_260: # %cond.load1009
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 254
-; CHECK-RV64-NEXT:    li a3, 253
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_261: # %else1010
-; CHECK-RV64-NEXT:    slli a2, a1, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_263
-; CHECK-RV64-NEXT:  # %bb.262: # %cond.load1013
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v20, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 255
-; CHECK-RV64-NEXT:    li a3, 254
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:  .LBB61_263: # %else1014
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_264
-; CHECK-RV64-NEXT:    j .LBB61_774
-; CHECK-RV64-NEXT:  .LBB61_264: # %else1018
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_265
-; CHECK-RV64-NEXT:    j .LBB61_775
-; CHECK-RV64-NEXT:  .LBB61_265: # %else1022
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_266
-; CHECK-RV64-NEXT:    j .LBB61_776
-; CHECK-RV64-NEXT:  .LBB61_266: # %else1026
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_267
-; CHECK-RV64-NEXT:    j .LBB61_777
-; CHECK-RV64-NEXT:  .LBB61_267: # %else1030
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_268
-; CHECK-RV64-NEXT:    j .LBB61_778
-; CHECK-RV64-NEXT:  .LBB61_268: # %else1034
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_269
-; CHECK-RV64-NEXT:    j .LBB61_779
-; CHECK-RV64-NEXT:  .LBB61_269: # %else1038
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_270
-; CHECK-RV64-NEXT:    j .LBB61_780
-; CHECK-RV64-NEXT:  .LBB61_270: # %else1042
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_271
-; CHECK-RV64-NEXT:    j .LBB61_781
-; CHECK-RV64-NEXT:  .LBB61_271: # %else1046
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_272
-; CHECK-RV64-NEXT:    j .LBB61_782
-; CHECK-RV64-NEXT:  .LBB61_272: # %else1050
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_273
-; CHECK-RV64-NEXT:    j .LBB61_783
-; CHECK-RV64-NEXT:  .LBB61_273: # %else1054
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_274
-; CHECK-RV64-NEXT:    j .LBB61_784
-; CHECK-RV64-NEXT:  .LBB61_274: # %else1058
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_275
-; CHECK-RV64-NEXT:    j .LBB61_785
-; CHECK-RV64-NEXT:  .LBB61_275: # %else1062
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_276
-; CHECK-RV64-NEXT:    j .LBB61_786
-; CHECK-RV64-NEXT:  .LBB61_276: # %else1066
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_277
-; CHECK-RV64-NEXT:    j .LBB61_787
-; CHECK-RV64-NEXT:  .LBB61_277: # %else1070
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_278
-; CHECK-RV64-NEXT:    j .LBB61_788
-; CHECK-RV64-NEXT:  .LBB61_278: # %else1074
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_279
-; CHECK-RV64-NEXT:    j .LBB61_789
-; CHECK-RV64-NEXT:  .LBB61_279: # %else1078
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_280
-; CHECK-RV64-NEXT:    j .LBB61_790
-; CHECK-RV64-NEXT:  .LBB61_280: # %else1082
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_281
-; CHECK-RV64-NEXT:    j .LBB61_791
-; CHECK-RV64-NEXT:  .LBB61_281: # %else1086
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_282
-; CHECK-RV64-NEXT:    j .LBB61_792
-; CHECK-RV64-NEXT:  .LBB61_282: # %else1090
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_283
-; CHECK-RV64-NEXT:    j .LBB61_793
-; CHECK-RV64-NEXT:  .LBB61_283: # %else1094
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_284
-; CHECK-RV64-NEXT:    j .LBB61_794
-; CHECK-RV64-NEXT:  .LBB61_284: # %else1098
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_285
-; CHECK-RV64-NEXT:    j .LBB61_795
-; CHECK-RV64-NEXT:  .LBB61_285: # %else1102
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_286
-; CHECK-RV64-NEXT:    j .LBB61_796
-; CHECK-RV64-NEXT:  .LBB61_286: # %else1106
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_287
-; CHECK-RV64-NEXT:    j .LBB61_797
-; CHECK-RV64-NEXT:  .LBB61_287: # %else1110
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_288
-; CHECK-RV64-NEXT:    j .LBB61_798
-; CHECK-RV64-NEXT:  .LBB61_288: # %else1114
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_289
-; CHECK-RV64-NEXT:    j .LBB61_799
-; CHECK-RV64-NEXT:  .LBB61_289: # %else1118
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_290
-; CHECK-RV64-NEXT:    j .LBB61_800
-; CHECK-RV64-NEXT:  .LBB61_290: # %else1122
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_291
-; CHECK-RV64-NEXT:    j .LBB61_801
-; CHECK-RV64-NEXT:  .LBB61_291: # %else1126
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_292
-; CHECK-RV64-NEXT:    j .LBB61_802
-; CHECK-RV64-NEXT:  .LBB61_292: # %else1130
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_293
-; CHECK-RV64-NEXT:    j .LBB61_803
-; CHECK-RV64-NEXT:  .LBB61_293: # %else1134
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_294
-; CHECK-RV64-NEXT:    j .LBB61_804
-; CHECK-RV64-NEXT:  .LBB61_294: # %else1138
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_295
-; CHECK-RV64-NEXT:    j .LBB61_805
-; CHECK-RV64-NEXT:  .LBB61_295: # %else1142
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_296
-; CHECK-RV64-NEXT:    j .LBB61_806
-; CHECK-RV64-NEXT:  .LBB61_296: # %else1146
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_297
-; CHECK-RV64-NEXT:    j .LBB61_807
-; CHECK-RV64-NEXT:  .LBB61_297: # %else1150
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_298
-; CHECK-RV64-NEXT:    j .LBB61_808
-; CHECK-RV64-NEXT:  .LBB61_298: # %else1154
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_299
-; CHECK-RV64-NEXT:    j .LBB61_809
-; CHECK-RV64-NEXT:  .LBB61_299: # %else1158
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_300
-; CHECK-RV64-NEXT:    j .LBB61_810
-; CHECK-RV64-NEXT:  .LBB61_300: # %else1162
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_301
-; CHECK-RV64-NEXT:    j .LBB61_811
-; CHECK-RV64-NEXT:  .LBB61_301: # %else1166
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_302
-; CHECK-RV64-NEXT:    j .LBB61_812
-; CHECK-RV64-NEXT:  .LBB61_302: # %else1170
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_303
-; CHECK-RV64-NEXT:    j .LBB61_813
-; CHECK-RV64-NEXT:  .LBB61_303: # %else1174
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_304
-; CHECK-RV64-NEXT:    j .LBB61_814
-; CHECK-RV64-NEXT:  .LBB61_304: # %else1178
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_305
-; CHECK-RV64-NEXT:    j .LBB61_815
-; CHECK-RV64-NEXT:  .LBB61_305: # %else1182
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_306
-; CHECK-RV64-NEXT:    j .LBB61_816
-; CHECK-RV64-NEXT:  .LBB61_306: # %else1186
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_307
-; CHECK-RV64-NEXT:    j .LBB61_817
-; CHECK-RV64-NEXT:  .LBB61_307: # %else1190
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_308
-; CHECK-RV64-NEXT:    j .LBB61_818
-; CHECK-RV64-NEXT:  .LBB61_308: # %else1194
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_309
-; CHECK-RV64-NEXT:    j .LBB61_819
-; CHECK-RV64-NEXT:  .LBB61_309: # %else1198
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_310
-; CHECK-RV64-NEXT:    j .LBB61_820
-; CHECK-RV64-NEXT:  .LBB61_310: # %else1202
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_311
-; CHECK-RV64-NEXT:    j .LBB61_821
-; CHECK-RV64-NEXT:  .LBB61_311: # %else1206
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_312
-; CHECK-RV64-NEXT:    j .LBB61_822
-; CHECK-RV64-NEXT:  .LBB61_312: # %else1210
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_313
-; CHECK-RV64-NEXT:    j .LBB61_823
-; CHECK-RV64-NEXT:  .LBB61_313: # %else1214
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_314
-; CHECK-RV64-NEXT:    j .LBB61_824
-; CHECK-RV64-NEXT:  .LBB61_314: # %else1218
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_315
-; CHECK-RV64-NEXT:    j .LBB61_825
-; CHECK-RV64-NEXT:  .LBB61_315: # %else1222
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_316
-; CHECK-RV64-NEXT:    j .LBB61_826
-; CHECK-RV64-NEXT:  .LBB61_316: # %else1226
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_317
-; CHECK-RV64-NEXT:    j .LBB61_827
-; CHECK-RV64-NEXT:  .LBB61_317: # %else1230
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_318
-; CHECK-RV64-NEXT:    j .LBB61_828
-; CHECK-RV64-NEXT:  .LBB61_318: # %else1234
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_319
-; CHECK-RV64-NEXT:    j .LBB61_829
-; CHECK-RV64-NEXT:  .LBB61_319: # %else1238
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_320
-; CHECK-RV64-NEXT:    j .LBB61_830
-; CHECK-RV64-NEXT:  .LBB61_320: # %else1242
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_321
-; CHECK-RV64-NEXT:    j .LBB61_831
-; CHECK-RV64-NEXT:  .LBB61_321: # %else1246
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_322
-; CHECK-RV64-NEXT:    j .LBB61_832
-; CHECK-RV64-NEXT:  .LBB61_322: # %else1250
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_323
-; CHECK-RV64-NEXT:    j .LBB61_833
-; CHECK-RV64-NEXT:  .LBB61_323: # %else1254
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_324
-; CHECK-RV64-NEXT:    j .LBB61_834
-; CHECK-RV64-NEXT:  .LBB61_324: # %else1258
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_325
-; CHECK-RV64-NEXT:    j .LBB61_835
-; CHECK-RV64-NEXT:  .LBB61_325: # %else1262
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_327
-; CHECK-RV64-NEXT:  .LBB61_326: # %cond.load1265
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 318
-; CHECK-RV64-NEXT:    li a3, 317
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_327: # %else1266
-; CHECK-RV64-NEXT:    slli a1, a2, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_329
-; CHECK-RV64-NEXT:  # %bb.328: # %cond.load1269
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    li a1, 319
-; CHECK-RV64-NEXT:    li a3, 318
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_329: # %else1270
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_330
-; CHECK-RV64-NEXT:    j .LBB61_836
-; CHECK-RV64-NEXT:  .LBB61_330: # %else1274
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_331
-; CHECK-RV64-NEXT:    j .LBB61_837
-; CHECK-RV64-NEXT:  .LBB61_331: # %else1278
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_332
-; CHECK-RV64-NEXT:    j .LBB61_838
-; CHECK-RV64-NEXT:  .LBB61_332: # %else1282
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_333
-; CHECK-RV64-NEXT:    j .LBB61_839
-; CHECK-RV64-NEXT:  .LBB61_333: # %else1286
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_334
-; CHECK-RV64-NEXT:    j .LBB61_840
-; CHECK-RV64-NEXT:  .LBB61_334: # %else1290
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_335
-; CHECK-RV64-NEXT:    j .LBB61_841
-; CHECK-RV64-NEXT:  .LBB61_335: # %else1294
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_336
-; CHECK-RV64-NEXT:    j .LBB61_842
-; CHECK-RV64-NEXT:  .LBB61_336: # %else1298
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_337
-; CHECK-RV64-NEXT:    j .LBB61_843
-; CHECK-RV64-NEXT:  .LBB61_337: # %else1302
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_338
-; CHECK-RV64-NEXT:    j .LBB61_844
-; CHECK-RV64-NEXT:  .LBB61_338: # %else1306
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_339
-; CHECK-RV64-NEXT:    j .LBB61_845
-; CHECK-RV64-NEXT:  .LBB61_339: # %else1310
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_340
-; CHECK-RV64-NEXT:    j .LBB61_846
-; CHECK-RV64-NEXT:  .LBB61_340: # %else1314
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_341
-; CHECK-RV64-NEXT:    j .LBB61_847
-; CHECK-RV64-NEXT:  .LBB61_341: # %else1318
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_342
-; CHECK-RV64-NEXT:    j .LBB61_848
-; CHECK-RV64-NEXT:  .LBB61_342: # %else1322
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_343
-; CHECK-RV64-NEXT:    j .LBB61_849
-; CHECK-RV64-NEXT:  .LBB61_343: # %else1326
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_344
-; CHECK-RV64-NEXT:    j .LBB61_850
-; CHECK-RV64-NEXT:  .LBB61_344: # %else1330
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_345
-; CHECK-RV64-NEXT:    j .LBB61_851
-; CHECK-RV64-NEXT:  .LBB61_345: # %else1334
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_346
-; CHECK-RV64-NEXT:    j .LBB61_852
-; CHECK-RV64-NEXT:  .LBB61_346: # %else1338
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_347
-; CHECK-RV64-NEXT:    j .LBB61_853
-; CHECK-RV64-NEXT:  .LBB61_347: # %else1342
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_348
-; CHECK-RV64-NEXT:    j .LBB61_854
-; CHECK-RV64-NEXT:  .LBB61_348: # %else1346
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_349
-; CHECK-RV64-NEXT:    j .LBB61_855
-; CHECK-RV64-NEXT:  .LBB61_349: # %else1350
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_350
-; CHECK-RV64-NEXT:    j .LBB61_856
-; CHECK-RV64-NEXT:  .LBB61_350: # %else1354
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_351
-; CHECK-RV64-NEXT:    j .LBB61_857
-; CHECK-RV64-NEXT:  .LBB61_351: # %else1358
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_352
-; CHECK-RV64-NEXT:    j .LBB61_858
-; CHECK-RV64-NEXT:  .LBB61_352: # %else1362
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_353
-; CHECK-RV64-NEXT:    j .LBB61_859
-; CHECK-RV64-NEXT:  .LBB61_353: # %else1366
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_354
-; CHECK-RV64-NEXT:    j .LBB61_860
-; CHECK-RV64-NEXT:  .LBB61_354: # %else1370
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_355
-; CHECK-RV64-NEXT:    j .LBB61_861
-; CHECK-RV64-NEXT:  .LBB61_355: # %else1374
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_356
-; CHECK-RV64-NEXT:    j .LBB61_862
-; CHECK-RV64-NEXT:  .LBB61_356: # %else1378
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_357
-; CHECK-RV64-NEXT:    j .LBB61_863
-; CHECK-RV64-NEXT:  .LBB61_357: # %else1382
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_358
-; CHECK-RV64-NEXT:    j .LBB61_864
-; CHECK-RV64-NEXT:  .LBB61_358: # %else1386
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_359
-; CHECK-RV64-NEXT:    j .LBB61_865
-; CHECK-RV64-NEXT:  .LBB61_359: # %else1390
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_360
-; CHECK-RV64-NEXT:    j .LBB61_866
-; CHECK-RV64-NEXT:  .LBB61_360: # %else1394
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_361
-; CHECK-RV64-NEXT:    j .LBB61_867
-; CHECK-RV64-NEXT:  .LBB61_361: # %else1398
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_362
-; CHECK-RV64-NEXT:    j .LBB61_868
-; CHECK-RV64-NEXT:  .LBB61_362: # %else1402
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_363
-; CHECK-RV64-NEXT:    j .LBB61_869
-; CHECK-RV64-NEXT:  .LBB61_363: # %else1406
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_364
-; CHECK-RV64-NEXT:    j .LBB61_870
-; CHECK-RV64-NEXT:  .LBB61_364: # %else1410
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_365
-; CHECK-RV64-NEXT:    j .LBB61_871
-; CHECK-RV64-NEXT:  .LBB61_365: # %else1414
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_366
-; CHECK-RV64-NEXT:    j .LBB61_872
-; CHECK-RV64-NEXT:  .LBB61_366: # %else1418
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_367
-; CHECK-RV64-NEXT:    j .LBB61_873
-; CHECK-RV64-NEXT:  .LBB61_367: # %else1422
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_368
-; CHECK-RV64-NEXT:    j .LBB61_874
-; CHECK-RV64-NEXT:  .LBB61_368: # %else1426
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_369
-; CHECK-RV64-NEXT:    j .LBB61_875
-; CHECK-RV64-NEXT:  .LBB61_369: # %else1430
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_370
-; CHECK-RV64-NEXT:    j .LBB61_876
-; CHECK-RV64-NEXT:  .LBB61_370: # %else1434
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_371
-; CHECK-RV64-NEXT:    j .LBB61_877
-; CHECK-RV64-NEXT:  .LBB61_371: # %else1438
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_372
-; CHECK-RV64-NEXT:    j .LBB61_878
-; CHECK-RV64-NEXT:  .LBB61_372: # %else1442
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_373
-; CHECK-RV64-NEXT:    j .LBB61_879
-; CHECK-RV64-NEXT:  .LBB61_373: # %else1446
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_374
-; CHECK-RV64-NEXT:    j .LBB61_880
-; CHECK-RV64-NEXT:  .LBB61_374: # %else1450
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_375
-; CHECK-RV64-NEXT:    j .LBB61_881
-; CHECK-RV64-NEXT:  .LBB61_375: # %else1454
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_376
-; CHECK-RV64-NEXT:    j .LBB61_882
-; CHECK-RV64-NEXT:  .LBB61_376: # %else1458
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_377
-; CHECK-RV64-NEXT:    j .LBB61_883
-; CHECK-RV64-NEXT:  .LBB61_377: # %else1462
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_378
-; CHECK-RV64-NEXT:    j .LBB61_884
-; CHECK-RV64-NEXT:  .LBB61_378: # %else1466
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_379
-; CHECK-RV64-NEXT:    j .LBB61_885
-; CHECK-RV64-NEXT:  .LBB61_379: # %else1470
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_380
-; CHECK-RV64-NEXT:    j .LBB61_886
-; CHECK-RV64-NEXT:  .LBB61_380: # %else1474
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_381
-; CHECK-RV64-NEXT:    j .LBB61_887
-; CHECK-RV64-NEXT:  .LBB61_381: # %else1478
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_382
-; CHECK-RV64-NEXT:    j .LBB61_888
-; CHECK-RV64-NEXT:  .LBB61_382: # %else1482
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_383
-; CHECK-RV64-NEXT:    j .LBB61_889
-; CHECK-RV64-NEXT:  .LBB61_383: # %else1486
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_384
-; CHECK-RV64-NEXT:    j .LBB61_890
-; CHECK-RV64-NEXT:  .LBB61_384: # %else1490
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_385
-; CHECK-RV64-NEXT:    j .LBB61_891
-; CHECK-RV64-NEXT:  .LBB61_385: # %else1494
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_386
-; CHECK-RV64-NEXT:    j .LBB61_892
-; CHECK-RV64-NEXT:  .LBB61_386: # %else1498
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_387
-; CHECK-RV64-NEXT:    j .LBB61_893
-; CHECK-RV64-NEXT:  .LBB61_387: # %else1502
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_388
-; CHECK-RV64-NEXT:    j .LBB61_894
-; CHECK-RV64-NEXT:  .LBB61_388: # %else1506
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_389
-; CHECK-RV64-NEXT:    j .LBB61_895
-; CHECK-RV64-NEXT:  .LBB61_389: # %else1510
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_390
-; CHECK-RV64-NEXT:    j .LBB61_896
-; CHECK-RV64-NEXT:  .LBB61_390: # %else1514
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_391
-; CHECK-RV64-NEXT:    j .LBB61_897
-; CHECK-RV64-NEXT:  .LBB61_391: # %else1518
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_393
-; CHECK-RV64-NEXT:  .LBB61_392: # %cond.load1521
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 382
-; CHECK-RV64-NEXT:    li a3, 381
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_393: # %else1522
-; CHECK-RV64-NEXT:    slli a2, a1, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_395
-; CHECK-RV64-NEXT:  # %bb.394: # %cond.load1525
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    li a2, 383
-; CHECK-RV64-NEXT:    li a3, 382
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_395: # %else1526
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_396
-; CHECK-RV64-NEXT:    j .LBB61_898
-; CHECK-RV64-NEXT:  .LBB61_396: # %else1530
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_397
-; CHECK-RV64-NEXT:    j .LBB61_899
-; CHECK-RV64-NEXT:  .LBB61_397: # %else1534
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_398
-; CHECK-RV64-NEXT:    j .LBB61_900
-; CHECK-RV64-NEXT:  .LBB61_398: # %else1538
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_399
-; CHECK-RV64-NEXT:    j .LBB61_901
-; CHECK-RV64-NEXT:  .LBB61_399: # %else1542
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_400
-; CHECK-RV64-NEXT:    j .LBB61_902
-; CHECK-RV64-NEXT:  .LBB61_400: # %else1546
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_401
-; CHECK-RV64-NEXT:    j .LBB61_903
-; CHECK-RV64-NEXT:  .LBB61_401: # %else1550
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_402
-; CHECK-RV64-NEXT:    j .LBB61_904
-; CHECK-RV64-NEXT:  .LBB61_402: # %else1554
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_403
-; CHECK-RV64-NEXT:    j .LBB61_905
-; CHECK-RV64-NEXT:  .LBB61_403: # %else1558
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_404
-; CHECK-RV64-NEXT:    j .LBB61_906
-; CHECK-RV64-NEXT:  .LBB61_404: # %else1562
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_405
-; CHECK-RV64-NEXT:    j .LBB61_907
-; CHECK-RV64-NEXT:  .LBB61_405: # %else1566
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_406
-; CHECK-RV64-NEXT:    j .LBB61_908
-; CHECK-RV64-NEXT:  .LBB61_406: # %else1570
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    beqz a1, .LBB61_407
-; CHECK-RV64-NEXT:    j .LBB61_909
-; CHECK-RV64-NEXT:  .LBB61_407: # %else1574
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_408
-; CHECK-RV64-NEXT:    j .LBB61_910
-; CHECK-RV64-NEXT:  .LBB61_408: # %else1578
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_409
-; CHECK-RV64-NEXT:    j .LBB61_911
-; CHECK-RV64-NEXT:  .LBB61_409: # %else1582
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_410
-; CHECK-RV64-NEXT:    j .LBB61_912
-; CHECK-RV64-NEXT:  .LBB61_410: # %else1586
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_411
-; CHECK-RV64-NEXT:    j .LBB61_913
-; CHECK-RV64-NEXT:  .LBB61_411: # %else1590
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_412
-; CHECK-RV64-NEXT:    j .LBB61_914
-; CHECK-RV64-NEXT:  .LBB61_412: # %else1594
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_413
-; CHECK-RV64-NEXT:    j .LBB61_915
-; CHECK-RV64-NEXT:  .LBB61_413: # %else1598
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_414
-; CHECK-RV64-NEXT:    j .LBB61_916
-; CHECK-RV64-NEXT:  .LBB61_414: # %else1602
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_415
-; CHECK-RV64-NEXT:    j .LBB61_917
-; CHECK-RV64-NEXT:  .LBB61_415: # %else1606
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_416
-; CHECK-RV64-NEXT:    j .LBB61_918
-; CHECK-RV64-NEXT:  .LBB61_416: # %else1610
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_417
-; CHECK-RV64-NEXT:    j .LBB61_919
-; CHECK-RV64-NEXT:  .LBB61_417: # %else1614
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_418
-; CHECK-RV64-NEXT:    j .LBB61_920
-; CHECK-RV64-NEXT:  .LBB61_418: # %else1618
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_419
-; CHECK-RV64-NEXT:    j .LBB61_921
-; CHECK-RV64-NEXT:  .LBB61_419: # %else1622
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_420
-; CHECK-RV64-NEXT:    j .LBB61_922
-; CHECK-RV64-NEXT:  .LBB61_420: # %else1626
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_421
-; CHECK-RV64-NEXT:    j .LBB61_923
-; CHECK-RV64-NEXT:  .LBB61_421: # %else1630
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_422
-; CHECK-RV64-NEXT:    j .LBB61_924
-; CHECK-RV64-NEXT:  .LBB61_422: # %else1634
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_423
-; CHECK-RV64-NEXT:    j .LBB61_925
-; CHECK-RV64-NEXT:  .LBB61_423: # %else1638
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_424
-; CHECK-RV64-NEXT:    j .LBB61_926
-; CHECK-RV64-NEXT:  .LBB61_424: # %else1642
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_425
-; CHECK-RV64-NEXT:    j .LBB61_927
-; CHECK-RV64-NEXT:  .LBB61_425: # %else1646
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_426
-; CHECK-RV64-NEXT:    j .LBB61_928
-; CHECK-RV64-NEXT:  .LBB61_426: # %else1650
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_427
-; CHECK-RV64-NEXT:    j .LBB61_929
-; CHECK-RV64-NEXT:  .LBB61_427: # %else1654
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_428
-; CHECK-RV64-NEXT:    j .LBB61_930
-; CHECK-RV64-NEXT:  .LBB61_428: # %else1658
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_429
-; CHECK-RV64-NEXT:    j .LBB61_931
-; CHECK-RV64-NEXT:  .LBB61_429: # %else1662
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_430
-; CHECK-RV64-NEXT:    j .LBB61_932
-; CHECK-RV64-NEXT:  .LBB61_430: # %else1666
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_431
-; CHECK-RV64-NEXT:    j .LBB61_933
-; CHECK-RV64-NEXT:  .LBB61_431: # %else1670
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_432
-; CHECK-RV64-NEXT:    j .LBB61_934
-; CHECK-RV64-NEXT:  .LBB61_432: # %else1674
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_433
-; CHECK-RV64-NEXT:    j .LBB61_935
-; CHECK-RV64-NEXT:  .LBB61_433: # %else1678
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_434
-; CHECK-RV64-NEXT:    j .LBB61_936
-; CHECK-RV64-NEXT:  .LBB61_434: # %else1682
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_435
-; CHECK-RV64-NEXT:    j .LBB61_937
-; CHECK-RV64-NEXT:  .LBB61_435: # %else1686
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_436
-; CHECK-RV64-NEXT:    j .LBB61_938
-; CHECK-RV64-NEXT:  .LBB61_436: # %else1690
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_437
-; CHECK-RV64-NEXT:    j .LBB61_939
-; CHECK-RV64-NEXT:  .LBB61_437: # %else1694
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_438
-; CHECK-RV64-NEXT:    j .LBB61_940
-; CHECK-RV64-NEXT:  .LBB61_438: # %else1698
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_439
-; CHECK-RV64-NEXT:    j .LBB61_941
-; CHECK-RV64-NEXT:  .LBB61_439: # %else1702
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_440
-; CHECK-RV64-NEXT:    j .LBB61_942
-; CHECK-RV64-NEXT:  .LBB61_440: # %else1706
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_441
-; CHECK-RV64-NEXT:    j .LBB61_943
-; CHECK-RV64-NEXT:  .LBB61_441: # %else1710
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_442
-; CHECK-RV64-NEXT:    j .LBB61_944
-; CHECK-RV64-NEXT:  .LBB61_442: # %else1714
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_443
-; CHECK-RV64-NEXT:    j .LBB61_945
-; CHECK-RV64-NEXT:  .LBB61_443: # %else1718
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_444
-; CHECK-RV64-NEXT:    j .LBB61_946
-; CHECK-RV64-NEXT:  .LBB61_444: # %else1722
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_445
-; CHECK-RV64-NEXT:    j .LBB61_947
-; CHECK-RV64-NEXT:  .LBB61_445: # %else1726
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_446
-; CHECK-RV64-NEXT:    j .LBB61_948
-; CHECK-RV64-NEXT:  .LBB61_446: # %else1730
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_447
-; CHECK-RV64-NEXT:    j .LBB61_949
-; CHECK-RV64-NEXT:  .LBB61_447: # %else1734
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_448
-; CHECK-RV64-NEXT:    j .LBB61_950
-; CHECK-RV64-NEXT:  .LBB61_448: # %else1738
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_449
-; CHECK-RV64-NEXT:    j .LBB61_951
-; CHECK-RV64-NEXT:  .LBB61_449: # %else1742
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_450
-; CHECK-RV64-NEXT:    j .LBB61_952
-; CHECK-RV64-NEXT:  .LBB61_450: # %else1746
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_451
-; CHECK-RV64-NEXT:    j .LBB61_953
-; CHECK-RV64-NEXT:  .LBB61_451: # %else1750
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_452
-; CHECK-RV64-NEXT:    j .LBB61_954
-; CHECK-RV64-NEXT:  .LBB61_452: # %else1754
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_453
-; CHECK-RV64-NEXT:    j .LBB61_955
-; CHECK-RV64-NEXT:  .LBB61_453: # %else1758
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_454
-; CHECK-RV64-NEXT:    j .LBB61_956
-; CHECK-RV64-NEXT:  .LBB61_454: # %else1762
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_455
-; CHECK-RV64-NEXT:    j .LBB61_957
-; CHECK-RV64-NEXT:  .LBB61_455: # %else1766
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_456
-; CHECK-RV64-NEXT:    j .LBB61_958
-; CHECK-RV64-NEXT:  .LBB61_456: # %else1770
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_457
-; CHECK-RV64-NEXT:    j .LBB61_959
-; CHECK-RV64-NEXT:  .LBB61_457: # %else1774
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_459
-; CHECK-RV64-NEXT:  .LBB61_458: # %cond.load1777
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 446
-; CHECK-RV64-NEXT:    li a3, 445
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_459: # %else1778
-; CHECK-RV64-NEXT:    slli a1, a2, 1
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_461
-; CHECK-RV64-NEXT:  # %bb.460: # %cond.load1781
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    li a1, 447
-; CHECK-RV64-NEXT:    li a3, 446
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:  .LBB61_461: # %else1782
-; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_462
-; CHECK-RV64-NEXT:    j .LBB61_960
-; CHECK-RV64-NEXT:  .LBB61_462: # %else1786
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_463
-; CHECK-RV64-NEXT:    j .LBB61_961
-; CHECK-RV64-NEXT:  .LBB61_463: # %else1790
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_464
-; CHECK-RV64-NEXT:    j .LBB61_962
-; CHECK-RV64-NEXT:  .LBB61_464: # %else1794
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_465
-; CHECK-RV64-NEXT:    j .LBB61_963
-; CHECK-RV64-NEXT:  .LBB61_465: # %else1798
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_466
-; CHECK-RV64-NEXT:    j .LBB61_964
-; CHECK-RV64-NEXT:  .LBB61_466: # %else1802
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_467
-; CHECK-RV64-NEXT:    j .LBB61_965
-; CHECK-RV64-NEXT:  .LBB61_467: # %else1806
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_468
-; CHECK-RV64-NEXT:    j .LBB61_966
-; CHECK-RV64-NEXT:  .LBB61_468: # %else1810
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_469
-; CHECK-RV64-NEXT:    j .LBB61_967
-; CHECK-RV64-NEXT:  .LBB61_469: # %else1814
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_470
-; CHECK-RV64-NEXT:    j .LBB61_968
-; CHECK-RV64-NEXT:  .LBB61_470: # %else1818
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_471
-; CHECK-RV64-NEXT:    j .LBB61_969
-; CHECK-RV64-NEXT:  .LBB61_471: # %else1822
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_472
-; CHECK-RV64-NEXT:    j .LBB61_970
-; CHECK-RV64-NEXT:  .LBB61_472: # %else1826
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    beqz a2, .LBB61_473
-; CHECK-RV64-NEXT:    j .LBB61_971
-; CHECK-RV64-NEXT:  .LBB61_473: # %else1830
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_474
-; CHECK-RV64-NEXT:    j .LBB61_972
-; CHECK-RV64-NEXT:  .LBB61_474: # %else1834
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_475
-; CHECK-RV64-NEXT:    j .LBB61_973
-; CHECK-RV64-NEXT:  .LBB61_475: # %else1838
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_476
-; CHECK-RV64-NEXT:    j .LBB61_974
-; CHECK-RV64-NEXT:  .LBB61_476: # %else1842
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_477
-; CHECK-RV64-NEXT:    j .LBB61_975
-; CHECK-RV64-NEXT:  .LBB61_477: # %else1846
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_478
-; CHECK-RV64-NEXT:    j .LBB61_976
-; CHECK-RV64-NEXT:  .LBB61_478: # %else1850
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_479
-; CHECK-RV64-NEXT:    j .LBB61_977
-; CHECK-RV64-NEXT:  .LBB61_479: # %else1854
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_480
-; CHECK-RV64-NEXT:    j .LBB61_978
-; CHECK-RV64-NEXT:  .LBB61_480: # %else1858
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_481
-; CHECK-RV64-NEXT:    j .LBB61_979
-; CHECK-RV64-NEXT:  .LBB61_481: # %else1862
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_482
-; CHECK-RV64-NEXT:    j .LBB61_980
-; CHECK-RV64-NEXT:  .LBB61_482: # %else1866
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_483
-; CHECK-RV64-NEXT:    j .LBB61_981
-; CHECK-RV64-NEXT:  .LBB61_483: # %else1870
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_484
-; CHECK-RV64-NEXT:    j .LBB61_982
-; CHECK-RV64-NEXT:  .LBB61_484: # %else1874
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_485
-; CHECK-RV64-NEXT:    j .LBB61_983
-; CHECK-RV64-NEXT:  .LBB61_485: # %else1878
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_486
-; CHECK-RV64-NEXT:    j .LBB61_984
-; CHECK-RV64-NEXT:  .LBB61_486: # %else1882
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_487
-; CHECK-RV64-NEXT:    j .LBB61_985
-; CHECK-RV64-NEXT:  .LBB61_487: # %else1886
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_488
-; CHECK-RV64-NEXT:    j .LBB61_986
-; CHECK-RV64-NEXT:  .LBB61_488: # %else1890
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_489
-; CHECK-RV64-NEXT:    j .LBB61_987
-; CHECK-RV64-NEXT:  .LBB61_489: # %else1894
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_490
-; CHECK-RV64-NEXT:    j .LBB61_988
-; CHECK-RV64-NEXT:  .LBB61_490: # %else1898
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_491
-; CHECK-RV64-NEXT:    j .LBB61_989
-; CHECK-RV64-NEXT:  .LBB61_491: # %else1902
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_492
-; CHECK-RV64-NEXT:    j .LBB61_990
-; CHECK-RV64-NEXT:  .LBB61_492: # %else1906
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_493
-; CHECK-RV64-NEXT:    j .LBB61_991
-; CHECK-RV64-NEXT:  .LBB61_493: # %else1910
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_494
-; CHECK-RV64-NEXT:    j .LBB61_992
-; CHECK-RV64-NEXT:  .LBB61_494: # %else1914
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_495
-; CHECK-RV64-NEXT:    j .LBB61_993
-; CHECK-RV64-NEXT:  .LBB61_495: # %else1918
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_496
-; CHECK-RV64-NEXT:    j .LBB61_994
-; CHECK-RV64-NEXT:  .LBB61_496: # %else1922
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_497
-; CHECK-RV64-NEXT:    j .LBB61_995
-; CHECK-RV64-NEXT:  .LBB61_497: # %else1926
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_498
-; CHECK-RV64-NEXT:    j .LBB61_996
-; CHECK-RV64-NEXT:  .LBB61_498: # %else1930
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_499
-; CHECK-RV64-NEXT:    j .LBB61_997
-; CHECK-RV64-NEXT:  .LBB61_499: # %else1934
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_500
-; CHECK-RV64-NEXT:    j .LBB61_998
-; CHECK-RV64-NEXT:  .LBB61_500: # %else1938
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_501
-; CHECK-RV64-NEXT:    j .LBB61_999
-; CHECK-RV64-NEXT:  .LBB61_501: # %else1942
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_502
-; CHECK-RV64-NEXT:    j .LBB61_1000
-; CHECK-RV64-NEXT:  .LBB61_502: # %else1946
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_503
-; CHECK-RV64-NEXT:    j .LBB61_1001
-; CHECK-RV64-NEXT:  .LBB61_503: # %else1950
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_504
-; CHECK-RV64-NEXT:    j .LBB61_1002
-; CHECK-RV64-NEXT:  .LBB61_504: # %else1954
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_505
-; CHECK-RV64-NEXT:    j .LBB61_1003
-; CHECK-RV64-NEXT:  .LBB61_505: # %else1958
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_506
-; CHECK-RV64-NEXT:    j .LBB61_1004
-; CHECK-RV64-NEXT:  .LBB61_506: # %else1962
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_507
-; CHECK-RV64-NEXT:    j .LBB61_1005
-; CHECK-RV64-NEXT:  .LBB61_507: # %else1966
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_508
-; CHECK-RV64-NEXT:    j .LBB61_1006
-; CHECK-RV64-NEXT:  .LBB61_508: # %else1970
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_509
-; CHECK-RV64-NEXT:    j .LBB61_1007
-; CHECK-RV64-NEXT:  .LBB61_509: # %else1974
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_510
-; CHECK-RV64-NEXT:    j .LBB61_1008
-; CHECK-RV64-NEXT:  .LBB61_510: # %else1978
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_511
-; CHECK-RV64-NEXT:    j .LBB61_1009
-; CHECK-RV64-NEXT:  .LBB61_511: # %else1982
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_512
-; CHECK-RV64-NEXT:    j .LBB61_1010
-; CHECK-RV64-NEXT:  .LBB61_512: # %else1986
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_513
-; CHECK-RV64-NEXT:    j .LBB61_1011
-; CHECK-RV64-NEXT:  .LBB61_513: # %else1990
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_514
-; CHECK-RV64-NEXT:    j .LBB61_1012
-; CHECK-RV64-NEXT:  .LBB61_514: # %else1994
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_515
-; CHECK-RV64-NEXT:    j .LBB61_1013
-; CHECK-RV64-NEXT:  .LBB61_515: # %else1998
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_516
-; CHECK-RV64-NEXT:    j .LBB61_1014
-; CHECK-RV64-NEXT:  .LBB61_516: # %else2002
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_517
-; CHECK-RV64-NEXT:    j .LBB61_1015
-; CHECK-RV64-NEXT:  .LBB61_517: # %else2006
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_518
-; CHECK-RV64-NEXT:    j .LBB61_1016
-; CHECK-RV64-NEXT:  .LBB61_518: # %else2010
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_519
-; CHECK-RV64-NEXT:    j .LBB61_1017
-; CHECK-RV64-NEXT:  .LBB61_519: # %else2014
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_520
-; CHECK-RV64-NEXT:    j .LBB61_1018
-; CHECK-RV64-NEXT:  .LBB61_520: # %else2018
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_521
-; CHECK-RV64-NEXT:    j .LBB61_1019
-; CHECK-RV64-NEXT:  .LBB61_521: # %else2022
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_522
-; CHECK-RV64-NEXT:    j .LBB61_1020
-; CHECK-RV64-NEXT:  .LBB61_522: # %else2026
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_523
-; CHECK-RV64-NEXT:    j .LBB61_1021
-; CHECK-RV64-NEXT:  .LBB61_523: # %else2030
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_524
-; CHECK-RV64-NEXT:    j .LBB61_1022
-; CHECK-RV64-NEXT:  .LBB61_524: # %else2034
-; CHECK-RV64-NEXT:    slli a2, a1, 1
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_525
-; CHECK-RV64-NEXT:    j .LBB61_1023
-; CHECK-RV64-NEXT:  .LBB61_525: # %else2038
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_526
-; CHECK-RV64-NEXT:    j .LBB61_1024
-; CHECK-RV64-NEXT:  .LBB61_526: # %else2042
-; CHECK-RV64-NEXT:    ret
-; CHECK-RV64-NEXT:  .LBB61_527: # %cond.load
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v8, a1
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_528
-; CHECK-RV64-NEXT:    j .LBB61_2
-; CHECK-RV64-NEXT:  .LBB61_528: # %cond.load1
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_529
-; CHECK-RV64-NEXT:    j .LBB61_3
-; CHECK-RV64-NEXT:  .LBB61_529: # %cond.load5
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_530
-; CHECK-RV64-NEXT:    j .LBB61_4
-; CHECK-RV64-NEXT:  .LBB61_530: # %cond.load9
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_531
-; CHECK-RV64-NEXT:    j .LBB61_5
-; CHECK-RV64-NEXT:  .LBB61_531: # %cond.load13
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_532
-; CHECK-RV64-NEXT:    j .LBB61_6
-; CHECK-RV64-NEXT:  .LBB61_532: # %cond.load17
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_533
-; CHECK-RV64-NEXT:    j .LBB61_7
-; CHECK-RV64-NEXT:  .LBB61_533: # %cond.load21
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_534
-; CHECK-RV64-NEXT:    j .LBB61_8
-; CHECK-RV64-NEXT:  .LBB61_534: # %cond.load25
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_535
-; CHECK-RV64-NEXT:    j .LBB61_9
-; CHECK-RV64-NEXT:  .LBB61_535: # %cond.load29
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_536
-; CHECK-RV64-NEXT:    j .LBB61_10
-; CHECK-RV64-NEXT:  .LBB61_536: # %cond.load33
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_537
-; CHECK-RV64-NEXT:    j .LBB61_11
-; CHECK-RV64-NEXT:  .LBB61_537: # %cond.load37
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_538
-; CHECK-RV64-NEXT:    j .LBB61_12
-; CHECK-RV64-NEXT:  .LBB61_538: # %cond.load41
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_539
-; CHECK-RV64-NEXT:    j .LBB61_13
-; CHECK-RV64-NEXT:  .LBB61_539: # %cond.load45
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_540
-; CHECK-RV64-NEXT:    j .LBB61_14
-; CHECK-RV64-NEXT:  .LBB61_540: # %cond.load49
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_541
-; CHECK-RV64-NEXT:    j .LBB61_15
-; CHECK-RV64-NEXT:  .LBB61_541: # %cond.load53
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_542
-; CHECK-RV64-NEXT:    j .LBB61_16
-; CHECK-RV64-NEXT:  .LBB61_542: # %cond.load57
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_543
-; CHECK-RV64-NEXT:    j .LBB61_17
-; CHECK-RV64-NEXT:  .LBB61_543: # %cond.load61
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_544
-; CHECK-RV64-NEXT:    j .LBB61_18
-; CHECK-RV64-NEXT:  .LBB61_544: # %cond.load65
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_545
-; CHECK-RV64-NEXT:    j .LBB61_19
-; CHECK-RV64-NEXT:  .LBB61_545: # %cond.load69
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_546
-; CHECK-RV64-NEXT:    j .LBB61_20
-; CHECK-RV64-NEXT:  .LBB61_546: # %cond.load73
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_547
-; CHECK-RV64-NEXT:    j .LBB61_21
-; CHECK-RV64-NEXT:  .LBB61_547: # %cond.load77
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_548
-; CHECK-RV64-NEXT:    j .LBB61_22
-; CHECK-RV64-NEXT:  .LBB61_548: # %cond.load81
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_549
-; CHECK-RV64-NEXT:    j .LBB61_23
-; CHECK-RV64-NEXT:  .LBB61_549: # %cond.load85
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_550
-; CHECK-RV64-NEXT:    j .LBB61_24
-; CHECK-RV64-NEXT:  .LBB61_550: # %cond.load89
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_551
-; CHECK-RV64-NEXT:    j .LBB61_25
-; CHECK-RV64-NEXT:  .LBB61_551: # %cond.load93
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_552
-; CHECK-RV64-NEXT:    j .LBB61_26
-; CHECK-RV64-NEXT:  .LBB61_552: # %cond.load97
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_553
-; CHECK-RV64-NEXT:    j .LBB61_27
-; CHECK-RV64-NEXT:  .LBB61_553: # %cond.load101
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_554
-; CHECK-RV64-NEXT:    j .LBB61_28
-; CHECK-RV64-NEXT:  .LBB61_554: # %cond.load105
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_555
-; CHECK-RV64-NEXT:    j .LBB61_29
-; CHECK-RV64-NEXT:  .LBB61_555: # %cond.load109
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_556
-; CHECK-RV64-NEXT:    j .LBB61_30
-; CHECK-RV64-NEXT:  .LBB61_556: # %cond.load113
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_557
-; CHECK-RV64-NEXT:    j .LBB61_31
-; CHECK-RV64-NEXT:  .LBB61_557: # %cond.load117
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 30
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_558
-; CHECK-RV64-NEXT:    j .LBB61_32
-; CHECK-RV64-NEXT:  .LBB61_558: # %cond.load121
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 32
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_559
-; CHECK-RV64-NEXT:    j .LBB61_33
-; CHECK-RV64-NEXT:  .LBB61_559: # %cond.load125
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 33
-; CHECK-RV64-NEXT:    li a3, 32
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_560
-; CHECK-RV64-NEXT:    j .LBB61_34
-; CHECK-RV64-NEXT:  .LBB61_560: # %cond.load129
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 34
-; CHECK-RV64-NEXT:    li a3, 33
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_561
-; CHECK-RV64-NEXT:    j .LBB61_35
-; CHECK-RV64-NEXT:  .LBB61_561: # %cond.load133
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 35
-; CHECK-RV64-NEXT:    li a3, 34
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_562
-; CHECK-RV64-NEXT:    j .LBB61_36
-; CHECK-RV64-NEXT:  .LBB61_562: # %cond.load137
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 36
-; CHECK-RV64-NEXT:    li a3, 35
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_563
-; CHECK-RV64-NEXT:    j .LBB61_37
-; CHECK-RV64-NEXT:  .LBB61_563: # %cond.load141
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 37
-; CHECK-RV64-NEXT:    li a3, 36
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_564
-; CHECK-RV64-NEXT:    j .LBB61_38
-; CHECK-RV64-NEXT:  .LBB61_564: # %cond.load145
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 38
-; CHECK-RV64-NEXT:    li a3, 37
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_565
-; CHECK-RV64-NEXT:    j .LBB61_39
-; CHECK-RV64-NEXT:  .LBB61_565: # %cond.load149
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 39
-; CHECK-RV64-NEXT:    li a3, 38
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_566
-; CHECK-RV64-NEXT:    j .LBB61_40
-; CHECK-RV64-NEXT:  .LBB61_566: # %cond.load153
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 40
-; CHECK-RV64-NEXT:    li a3, 39
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_567
-; CHECK-RV64-NEXT:    j .LBB61_41
-; CHECK-RV64-NEXT:  .LBB61_567: # %cond.load157
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 41
-; CHECK-RV64-NEXT:    li a3, 40
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_568
-; CHECK-RV64-NEXT:    j .LBB61_42
-; CHECK-RV64-NEXT:  .LBB61_568: # %cond.load161
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 42
-; CHECK-RV64-NEXT:    li a3, 41
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_569
-; CHECK-RV64-NEXT:    j .LBB61_43
-; CHECK-RV64-NEXT:  .LBB61_569: # %cond.load165
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 43
-; CHECK-RV64-NEXT:    li a3, 42
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_570
-; CHECK-RV64-NEXT:    j .LBB61_44
-; CHECK-RV64-NEXT:  .LBB61_570: # %cond.load169
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 44
-; CHECK-RV64-NEXT:    li a3, 43
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_571
-; CHECK-RV64-NEXT:    j .LBB61_45
-; CHECK-RV64-NEXT:  .LBB61_571: # %cond.load173
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 45
-; CHECK-RV64-NEXT:    li a3, 44
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_572
-; CHECK-RV64-NEXT:    j .LBB61_46
-; CHECK-RV64-NEXT:  .LBB61_572: # %cond.load177
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 46
-; CHECK-RV64-NEXT:    li a3, 45
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_573
-; CHECK-RV64-NEXT:    j .LBB61_47
-; CHECK-RV64-NEXT:  .LBB61_573: # %cond.load181
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 47
-; CHECK-RV64-NEXT:    li a3, 46
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_574
-; CHECK-RV64-NEXT:    j .LBB61_48
-; CHECK-RV64-NEXT:  .LBB61_574: # %cond.load185
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 48
-; CHECK-RV64-NEXT:    li a3, 47
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_575
-; CHECK-RV64-NEXT:    j .LBB61_49
-; CHECK-RV64-NEXT:  .LBB61_575: # %cond.load189
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 49
-; CHECK-RV64-NEXT:    li a3, 48
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_576
-; CHECK-RV64-NEXT:    j .LBB61_50
-; CHECK-RV64-NEXT:  .LBB61_576: # %cond.load193
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 50
-; CHECK-RV64-NEXT:    li a3, 49
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_577
-; CHECK-RV64-NEXT:    j .LBB61_51
-; CHECK-RV64-NEXT:  .LBB61_577: # %cond.load197
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 51
-; CHECK-RV64-NEXT:    li a3, 50
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_578
-; CHECK-RV64-NEXT:    j .LBB61_52
-; CHECK-RV64-NEXT:  .LBB61_578: # %cond.load201
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 52
-; CHECK-RV64-NEXT:    li a3, 51
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_579
-; CHECK-RV64-NEXT:    j .LBB61_53
-; CHECK-RV64-NEXT:  .LBB61_579: # %cond.load205
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 53
-; CHECK-RV64-NEXT:    li a3, 52
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_580
-; CHECK-RV64-NEXT:    j .LBB61_54
-; CHECK-RV64-NEXT:  .LBB61_580: # %cond.load209
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 54
-; CHECK-RV64-NEXT:    li a3, 53
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_581
-; CHECK-RV64-NEXT:    j .LBB61_55
-; CHECK-RV64-NEXT:  .LBB61_581: # %cond.load213
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 55
-; CHECK-RV64-NEXT:    li a3, 54
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_582
-; CHECK-RV64-NEXT:    j .LBB61_56
-; CHECK-RV64-NEXT:  .LBB61_582: # %cond.load217
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 56
-; CHECK-RV64-NEXT:    li a3, 55
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_583
-; CHECK-RV64-NEXT:    j .LBB61_57
-; CHECK-RV64-NEXT:  .LBB61_583: # %cond.load221
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 57
-; CHECK-RV64-NEXT:    li a3, 56
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_584
-; CHECK-RV64-NEXT:    j .LBB61_58
-; CHECK-RV64-NEXT:  .LBB61_584: # %cond.load225
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 58
-; CHECK-RV64-NEXT:    li a3, 57
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_585
-; CHECK-RV64-NEXT:    j .LBB61_59
-; CHECK-RV64-NEXT:  .LBB61_585: # %cond.load229
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 59
-; CHECK-RV64-NEXT:    li a3, 58
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_586
-; CHECK-RV64-NEXT:    j .LBB61_60
-; CHECK-RV64-NEXT:  .LBB61_586: # %cond.load233
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 60
-; CHECK-RV64-NEXT:    li a3, 59
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_587
-; CHECK-RV64-NEXT:    j .LBB61_61
-; CHECK-RV64-NEXT:  .LBB61_587: # %cond.load237
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 61
-; CHECK-RV64-NEXT:    li a3, 60
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_1025
-; CHECK-RV64-NEXT:    j .LBB61_62
-; CHECK-RV64-NEXT:  .LBB61_1025: # %cond.load237
-; CHECK-RV64-NEXT:    j .LBB61_63
-; CHECK-RV64-NEXT:  .LBB61_588: # %cond.load249
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 64
-; CHECK-RV64-NEXT:    li a3, 63
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_589
-; CHECK-RV64-NEXT:    j .LBB61_67
-; CHECK-RV64-NEXT:  .LBB61_589: # %cond.load253
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 65
-; CHECK-RV64-NEXT:    li a3, 64
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_590
-; CHECK-RV64-NEXT:    j .LBB61_68
-; CHECK-RV64-NEXT:  .LBB61_590: # %cond.load257
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 66
-; CHECK-RV64-NEXT:    li a3, 65
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_591
-; CHECK-RV64-NEXT:    j .LBB61_69
-; CHECK-RV64-NEXT:  .LBB61_591: # %cond.load261
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 67
-; CHECK-RV64-NEXT:    li a3, 66
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_592
-; CHECK-RV64-NEXT:    j .LBB61_70
-; CHECK-RV64-NEXT:  .LBB61_592: # %cond.load265
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 68
-; CHECK-RV64-NEXT:    li a3, 67
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_593
-; CHECK-RV64-NEXT:    j .LBB61_71
-; CHECK-RV64-NEXT:  .LBB61_593: # %cond.load269
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 69
-; CHECK-RV64-NEXT:    li a3, 68
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_594
-; CHECK-RV64-NEXT:    j .LBB61_72
-; CHECK-RV64-NEXT:  .LBB61_594: # %cond.load273
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 70
-; CHECK-RV64-NEXT:    li a3, 69
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_595
-; CHECK-RV64-NEXT:    j .LBB61_73
-; CHECK-RV64-NEXT:  .LBB61_595: # %cond.load277
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 71
-; CHECK-RV64-NEXT:    li a3, 70
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_596
-; CHECK-RV64-NEXT:    j .LBB61_74
-; CHECK-RV64-NEXT:  .LBB61_596: # %cond.load281
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 72
-; CHECK-RV64-NEXT:    li a3, 71
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_597
-; CHECK-RV64-NEXT:    j .LBB61_75
-; CHECK-RV64-NEXT:  .LBB61_597: # %cond.load285
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 73
-; CHECK-RV64-NEXT:    li a3, 72
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_598
-; CHECK-RV64-NEXT:    j .LBB61_76
-; CHECK-RV64-NEXT:  .LBB61_598: # %cond.load289
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 74
-; CHECK-RV64-NEXT:    li a3, 73
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_599
-; CHECK-RV64-NEXT:    j .LBB61_77
-; CHECK-RV64-NEXT:  .LBB61_599: # %cond.load293
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 75
-; CHECK-RV64-NEXT:    li a3, 74
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_600
-; CHECK-RV64-NEXT:    j .LBB61_78
-; CHECK-RV64-NEXT:  .LBB61_600: # %cond.load297
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 76
-; CHECK-RV64-NEXT:    li a3, 75
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_601
-; CHECK-RV64-NEXT:    j .LBB61_79
-; CHECK-RV64-NEXT:  .LBB61_601: # %cond.load301
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 77
-; CHECK-RV64-NEXT:    li a3, 76
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_602
-; CHECK-RV64-NEXT:    j .LBB61_80
-; CHECK-RV64-NEXT:  .LBB61_602: # %cond.load305
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 78
-; CHECK-RV64-NEXT:    li a3, 77
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_603
-; CHECK-RV64-NEXT:    j .LBB61_81
-; CHECK-RV64-NEXT:  .LBB61_603: # %cond.load309
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 79
-; CHECK-RV64-NEXT:    li a3, 78
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_604
-; CHECK-RV64-NEXT:    j .LBB61_82
-; CHECK-RV64-NEXT:  .LBB61_604: # %cond.load313
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 80
-; CHECK-RV64-NEXT:    li a3, 79
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_605
-; CHECK-RV64-NEXT:    j .LBB61_83
-; CHECK-RV64-NEXT:  .LBB61_605: # %cond.load317
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 81
-; CHECK-RV64-NEXT:    li a3, 80
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_606
-; CHECK-RV64-NEXT:    j .LBB61_84
-; CHECK-RV64-NEXT:  .LBB61_606: # %cond.load321
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 82
-; CHECK-RV64-NEXT:    li a3, 81
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_607
-; CHECK-RV64-NEXT:    j .LBB61_85
-; CHECK-RV64-NEXT:  .LBB61_607: # %cond.load325
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 83
-; CHECK-RV64-NEXT:    li a3, 82
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_608
-; CHECK-RV64-NEXT:    j .LBB61_86
-; CHECK-RV64-NEXT:  .LBB61_608: # %cond.load329
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 84
-; CHECK-RV64-NEXT:    li a3, 83
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_609
-; CHECK-RV64-NEXT:    j .LBB61_87
-; CHECK-RV64-NEXT:  .LBB61_609: # %cond.load333
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 85
-; CHECK-RV64-NEXT:    li a3, 84
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_610
-; CHECK-RV64-NEXT:    j .LBB61_88
-; CHECK-RV64-NEXT:  .LBB61_610: # %cond.load337
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 86
-; CHECK-RV64-NEXT:    li a3, 85
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_611
-; CHECK-RV64-NEXT:    j .LBB61_89
-; CHECK-RV64-NEXT:  .LBB61_611: # %cond.load341
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 87
-; CHECK-RV64-NEXT:    li a3, 86
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_612
-; CHECK-RV64-NEXT:    j .LBB61_90
-; CHECK-RV64-NEXT:  .LBB61_612: # %cond.load345
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 88
-; CHECK-RV64-NEXT:    li a3, 87
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_613
-; CHECK-RV64-NEXT:    j .LBB61_91
-; CHECK-RV64-NEXT:  .LBB61_613: # %cond.load349
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 89
-; CHECK-RV64-NEXT:    li a3, 88
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_614
-; CHECK-RV64-NEXT:    j .LBB61_92
-; CHECK-RV64-NEXT:  .LBB61_614: # %cond.load353
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 90
-; CHECK-RV64-NEXT:    li a3, 89
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_615
-; CHECK-RV64-NEXT:    j .LBB61_93
-; CHECK-RV64-NEXT:  .LBB61_615: # %cond.load357
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 91
-; CHECK-RV64-NEXT:    li a3, 90
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_616
-; CHECK-RV64-NEXT:    j .LBB61_94
-; CHECK-RV64-NEXT:  .LBB61_616: # %cond.load361
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 92
-; CHECK-RV64-NEXT:    li a3, 91
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_617
-; CHECK-RV64-NEXT:    j .LBB61_95
-; CHECK-RV64-NEXT:  .LBB61_617: # %cond.load365
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 93
-; CHECK-RV64-NEXT:    li a3, 92
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_618
-; CHECK-RV64-NEXT:    j .LBB61_96
-; CHECK-RV64-NEXT:  .LBB61_618: # %cond.load369
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 94
-; CHECK-RV64-NEXT:    li a3, 93
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_619
-; CHECK-RV64-NEXT:    j .LBB61_97
-; CHECK-RV64-NEXT:  .LBB61_619: # %cond.load373
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 95
-; CHECK-RV64-NEXT:    li a3, 94
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_620
-; CHECK-RV64-NEXT:    j .LBB61_98
-; CHECK-RV64-NEXT:  .LBB61_620: # %cond.load377
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 96
-; CHECK-RV64-NEXT:    li a3, 95
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_621
-; CHECK-RV64-NEXT:    j .LBB61_99
-; CHECK-RV64-NEXT:  .LBB61_621: # %cond.load381
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 97
-; CHECK-RV64-NEXT:    li a3, 96
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_622
-; CHECK-RV64-NEXT:    j .LBB61_100
-; CHECK-RV64-NEXT:  .LBB61_622: # %cond.load385
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 98
-; CHECK-RV64-NEXT:    li a3, 97
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_623
-; CHECK-RV64-NEXT:    j .LBB61_101
-; CHECK-RV64-NEXT:  .LBB61_623: # %cond.load389
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 99
-; CHECK-RV64-NEXT:    li a3, 98
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_624
-; CHECK-RV64-NEXT:    j .LBB61_102
-; CHECK-RV64-NEXT:  .LBB61_624: # %cond.load393
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 100
-; CHECK-RV64-NEXT:    li a3, 99
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_625
-; CHECK-RV64-NEXT:    j .LBB61_103
-; CHECK-RV64-NEXT:  .LBB61_625: # %cond.load397
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 101
-; CHECK-RV64-NEXT:    li a3, 100
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_626
-; CHECK-RV64-NEXT:    j .LBB61_104
-; CHECK-RV64-NEXT:  .LBB61_626: # %cond.load401
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 102
-; CHECK-RV64-NEXT:    li a3, 101
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_627
-; CHECK-RV64-NEXT:    j .LBB61_105
-; CHECK-RV64-NEXT:  .LBB61_627: # %cond.load405
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 103
-; CHECK-RV64-NEXT:    li a3, 102
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_628
-; CHECK-RV64-NEXT:    j .LBB61_106
-; CHECK-RV64-NEXT:  .LBB61_628: # %cond.load409
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 104
-; CHECK-RV64-NEXT:    li a3, 103
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_629
-; CHECK-RV64-NEXT:    j .LBB61_107
-; CHECK-RV64-NEXT:  .LBB61_629: # %cond.load413
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 105
-; CHECK-RV64-NEXT:    li a3, 104
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_630
-; CHECK-RV64-NEXT:    j .LBB61_108
-; CHECK-RV64-NEXT:  .LBB61_630: # %cond.load417
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 106
-; CHECK-RV64-NEXT:    li a3, 105
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_631
-; CHECK-RV64-NEXT:    j .LBB61_109
-; CHECK-RV64-NEXT:  .LBB61_631: # %cond.load421
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 107
-; CHECK-RV64-NEXT:    li a3, 106
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_632
-; CHECK-RV64-NEXT:    j .LBB61_110
-; CHECK-RV64-NEXT:  .LBB61_632: # %cond.load425
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 108
-; CHECK-RV64-NEXT:    li a3, 107
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_633
-; CHECK-RV64-NEXT:    j .LBB61_111
-; CHECK-RV64-NEXT:  .LBB61_633: # %cond.load429
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 109
-; CHECK-RV64-NEXT:    li a3, 108
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_634
-; CHECK-RV64-NEXT:    j .LBB61_112
-; CHECK-RV64-NEXT:  .LBB61_634: # %cond.load433
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 110
-; CHECK-RV64-NEXT:    li a3, 109
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_635
-; CHECK-RV64-NEXT:    j .LBB61_113
-; CHECK-RV64-NEXT:  .LBB61_635: # %cond.load437
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 111
-; CHECK-RV64-NEXT:    li a3, 110
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_636
-; CHECK-RV64-NEXT:    j .LBB61_114
-; CHECK-RV64-NEXT:  .LBB61_636: # %cond.load441
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 112
-; CHECK-RV64-NEXT:    li a3, 111
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_637
-; CHECK-RV64-NEXT:    j .LBB61_115
-; CHECK-RV64-NEXT:  .LBB61_637: # %cond.load445
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 113
-; CHECK-RV64-NEXT:    li a3, 112
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_638
-; CHECK-RV64-NEXT:    j .LBB61_116
-; CHECK-RV64-NEXT:  .LBB61_638: # %cond.load449
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 114
-; CHECK-RV64-NEXT:    li a3, 113
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_639
-; CHECK-RV64-NEXT:    j .LBB61_117
-; CHECK-RV64-NEXT:  .LBB61_639: # %cond.load453
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 115
-; CHECK-RV64-NEXT:    li a3, 114
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_640
-; CHECK-RV64-NEXT:    j .LBB61_118
-; CHECK-RV64-NEXT:  .LBB61_640: # %cond.load457
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 116
-; CHECK-RV64-NEXT:    li a3, 115
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_641
-; CHECK-RV64-NEXT:    j .LBB61_119
-; CHECK-RV64-NEXT:  .LBB61_641: # %cond.load461
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 117
-; CHECK-RV64-NEXT:    li a3, 116
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_642
-; CHECK-RV64-NEXT:    j .LBB61_120
-; CHECK-RV64-NEXT:  .LBB61_642: # %cond.load465
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 118
-; CHECK-RV64-NEXT:    li a3, 117
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_643
-; CHECK-RV64-NEXT:    j .LBB61_121
-; CHECK-RV64-NEXT:  .LBB61_643: # %cond.load469
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 119
-; CHECK-RV64-NEXT:    li a3, 118
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_644
-; CHECK-RV64-NEXT:    j .LBB61_122
-; CHECK-RV64-NEXT:  .LBB61_644: # %cond.load473
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 120
-; CHECK-RV64-NEXT:    li a3, 119
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_645
-; CHECK-RV64-NEXT:    j .LBB61_123
-; CHECK-RV64-NEXT:  .LBB61_645: # %cond.load477
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 121
-; CHECK-RV64-NEXT:    li a3, 120
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_646
-; CHECK-RV64-NEXT:    j .LBB61_124
-; CHECK-RV64-NEXT:  .LBB61_646: # %cond.load481
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 122
-; CHECK-RV64-NEXT:    li a3, 121
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_647
-; CHECK-RV64-NEXT:    j .LBB61_125
-; CHECK-RV64-NEXT:  .LBB61_647: # %cond.load485
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 123
-; CHECK-RV64-NEXT:    li a3, 122
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_648
-; CHECK-RV64-NEXT:    j .LBB61_126
-; CHECK-RV64-NEXT:  .LBB61_648: # %cond.load489
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 124
-; CHECK-RV64-NEXT:    li a3, 123
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_649
-; CHECK-RV64-NEXT:    j .LBB61_127
-; CHECK-RV64-NEXT:  .LBB61_649: # %cond.load493
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a2, 125
-; CHECK-RV64-NEXT:    li a3, 124
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_1026
-; CHECK-RV64-NEXT:    j .LBB61_128
-; CHECK-RV64-NEXT:  .LBB61_1026: # %cond.load493
-; CHECK-RV64-NEXT:    j .LBB61_129
-; CHECK-RV64-NEXT:  .LBB61_650: # %cond.load505
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-RV64-NEXT:    li a1, 128
-; CHECK-RV64-NEXT:    li a3, 127
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_651
-; CHECK-RV64-NEXT:    j .LBB61_133
-; CHECK-RV64-NEXT:  .LBB61_651: # %cond.load509
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 129
-; CHECK-RV64-NEXT:    li a3, 128
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_652
-; CHECK-RV64-NEXT:    j .LBB61_134
-; CHECK-RV64-NEXT:  .LBB61_652: # %cond.load513
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 130
-; CHECK-RV64-NEXT:    li a3, 129
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_653
-; CHECK-RV64-NEXT:    j .LBB61_135
-; CHECK-RV64-NEXT:  .LBB61_653: # %cond.load517
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 131
-; CHECK-RV64-NEXT:    li a3, 130
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_654
-; CHECK-RV64-NEXT:    j .LBB61_136
-; CHECK-RV64-NEXT:  .LBB61_654: # %cond.load521
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 132
-; CHECK-RV64-NEXT:    li a3, 131
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_655
-; CHECK-RV64-NEXT:    j .LBB61_137
-; CHECK-RV64-NEXT:  .LBB61_655: # %cond.load525
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 133
-; CHECK-RV64-NEXT:    li a3, 132
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_656
-; CHECK-RV64-NEXT:    j .LBB61_138
-; CHECK-RV64-NEXT:  .LBB61_656: # %cond.load529
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 134
-; CHECK-RV64-NEXT:    li a3, 133
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_657
-; CHECK-RV64-NEXT:    j .LBB61_139
-; CHECK-RV64-NEXT:  .LBB61_657: # %cond.load533
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 135
-; CHECK-RV64-NEXT:    li a3, 134
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_658
-; CHECK-RV64-NEXT:    j .LBB61_140
-; CHECK-RV64-NEXT:  .LBB61_658: # %cond.load537
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 136
-; CHECK-RV64-NEXT:    li a3, 135
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_659
-; CHECK-RV64-NEXT:    j .LBB61_141
-; CHECK-RV64-NEXT:  .LBB61_659: # %cond.load541
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 137
-; CHECK-RV64-NEXT:    li a3, 136
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_660
-; CHECK-RV64-NEXT:    j .LBB61_142
-; CHECK-RV64-NEXT:  .LBB61_660: # %cond.load545
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 138
-; CHECK-RV64-NEXT:    li a3, 137
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_661
-; CHECK-RV64-NEXT:    j .LBB61_143
-; CHECK-RV64-NEXT:  .LBB61_661: # %cond.load549
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 139
-; CHECK-RV64-NEXT:    li a3, 138
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_662
-; CHECK-RV64-NEXT:    j .LBB61_144
-; CHECK-RV64-NEXT:  .LBB61_662: # %cond.load553
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 140
-; CHECK-RV64-NEXT:    li a3, 139
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_663
-; CHECK-RV64-NEXT:    j .LBB61_145
-; CHECK-RV64-NEXT:  .LBB61_663: # %cond.load557
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 141
-; CHECK-RV64-NEXT:    li a3, 140
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_664
-; CHECK-RV64-NEXT:    j .LBB61_146
-; CHECK-RV64-NEXT:  .LBB61_664: # %cond.load561
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 142
-; CHECK-RV64-NEXT:    li a3, 141
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_665
-; CHECK-RV64-NEXT:    j .LBB61_147
-; CHECK-RV64-NEXT:  .LBB61_665: # %cond.load565
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 143
-; CHECK-RV64-NEXT:    li a3, 142
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_666
-; CHECK-RV64-NEXT:    j .LBB61_148
-; CHECK-RV64-NEXT:  .LBB61_666: # %cond.load569
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 144
-; CHECK-RV64-NEXT:    li a3, 143
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_667
-; CHECK-RV64-NEXT:    j .LBB61_149
-; CHECK-RV64-NEXT:  .LBB61_667: # %cond.load573
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 145
-; CHECK-RV64-NEXT:    li a3, 144
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_668
-; CHECK-RV64-NEXT:    j .LBB61_150
-; CHECK-RV64-NEXT:  .LBB61_668: # %cond.load577
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 146
-; CHECK-RV64-NEXT:    li a3, 145
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_669
-; CHECK-RV64-NEXT:    j .LBB61_151
-; CHECK-RV64-NEXT:  .LBB61_669: # %cond.load581
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 147
-; CHECK-RV64-NEXT:    li a3, 146
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_670
-; CHECK-RV64-NEXT:    j .LBB61_152
-; CHECK-RV64-NEXT:  .LBB61_670: # %cond.load585
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 148
-; CHECK-RV64-NEXT:    li a3, 147
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_671
-; CHECK-RV64-NEXT:    j .LBB61_153
-; CHECK-RV64-NEXT:  .LBB61_671: # %cond.load589
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 149
-; CHECK-RV64-NEXT:    li a3, 148
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_672
-; CHECK-RV64-NEXT:    j .LBB61_154
-; CHECK-RV64-NEXT:  .LBB61_672: # %cond.load593
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 150
-; CHECK-RV64-NEXT:    li a3, 149
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_673
-; CHECK-RV64-NEXT:    j .LBB61_155
-; CHECK-RV64-NEXT:  .LBB61_673: # %cond.load597
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 151
-; CHECK-RV64-NEXT:    li a3, 150
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_674
-; CHECK-RV64-NEXT:    j .LBB61_156
-; CHECK-RV64-NEXT:  .LBB61_674: # %cond.load601
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 152
-; CHECK-RV64-NEXT:    li a3, 151
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_675
-; CHECK-RV64-NEXT:    j .LBB61_157
-; CHECK-RV64-NEXT:  .LBB61_675: # %cond.load605
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 153
-; CHECK-RV64-NEXT:    li a3, 152
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_676
-; CHECK-RV64-NEXT:    j .LBB61_158
-; CHECK-RV64-NEXT:  .LBB61_676: # %cond.load609
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 154
-; CHECK-RV64-NEXT:    li a3, 153
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_677
-; CHECK-RV64-NEXT:    j .LBB61_159
-; CHECK-RV64-NEXT:  .LBB61_677: # %cond.load613
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 155
-; CHECK-RV64-NEXT:    li a3, 154
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_678
-; CHECK-RV64-NEXT:    j .LBB61_160
-; CHECK-RV64-NEXT:  .LBB61_678: # %cond.load617
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 156
-; CHECK-RV64-NEXT:    li a3, 155
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_679
-; CHECK-RV64-NEXT:    j .LBB61_161
-; CHECK-RV64-NEXT:  .LBB61_679: # %cond.load621
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 157
-; CHECK-RV64-NEXT:    li a3, 156
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_680
-; CHECK-RV64-NEXT:    j .LBB61_162
-; CHECK-RV64-NEXT:  .LBB61_680: # %cond.load625
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 158
-; CHECK-RV64-NEXT:    li a3, 157
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_681
-; CHECK-RV64-NEXT:    j .LBB61_163
-; CHECK-RV64-NEXT:  .LBB61_681: # %cond.load629
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 159
-; CHECK-RV64-NEXT:    li a3, 158
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_682
-; CHECK-RV64-NEXT:    j .LBB61_164
-; CHECK-RV64-NEXT:  .LBB61_682: # %cond.load633
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 160
-; CHECK-RV64-NEXT:    li a3, 159
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_683
-; CHECK-RV64-NEXT:    j .LBB61_165
-; CHECK-RV64-NEXT:  .LBB61_683: # %cond.load637
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 161
-; CHECK-RV64-NEXT:    li a3, 160
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_684
-; CHECK-RV64-NEXT:    j .LBB61_166
-; CHECK-RV64-NEXT:  .LBB61_684: # %cond.load641
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 162
-; CHECK-RV64-NEXT:    li a3, 161
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_685
-; CHECK-RV64-NEXT:    j .LBB61_167
-; CHECK-RV64-NEXT:  .LBB61_685: # %cond.load645
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 163
-; CHECK-RV64-NEXT:    li a3, 162
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_686
-; CHECK-RV64-NEXT:    j .LBB61_168
-; CHECK-RV64-NEXT:  .LBB61_686: # %cond.load649
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 164
-; CHECK-RV64-NEXT:    li a3, 163
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_687
-; CHECK-RV64-NEXT:    j .LBB61_169
-; CHECK-RV64-NEXT:  .LBB61_687: # %cond.load653
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 165
-; CHECK-RV64-NEXT:    li a3, 164
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_688
-; CHECK-RV64-NEXT:    j .LBB61_170
-; CHECK-RV64-NEXT:  .LBB61_688: # %cond.load657
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 166
-; CHECK-RV64-NEXT:    li a3, 165
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_689
-; CHECK-RV64-NEXT:    j .LBB61_171
-; CHECK-RV64-NEXT:  .LBB61_689: # %cond.load661
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 167
-; CHECK-RV64-NEXT:    li a3, 166
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_690
-; CHECK-RV64-NEXT:    j .LBB61_172
-; CHECK-RV64-NEXT:  .LBB61_690: # %cond.load665
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 168
-; CHECK-RV64-NEXT:    li a3, 167
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_691
-; CHECK-RV64-NEXT:    j .LBB61_173
-; CHECK-RV64-NEXT:  .LBB61_691: # %cond.load669
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 169
-; CHECK-RV64-NEXT:    li a3, 168
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_692
-; CHECK-RV64-NEXT:    j .LBB61_174
-; CHECK-RV64-NEXT:  .LBB61_692: # %cond.load673
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 170
-; CHECK-RV64-NEXT:    li a3, 169
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_693
-; CHECK-RV64-NEXT:    j .LBB61_175
-; CHECK-RV64-NEXT:  .LBB61_693: # %cond.load677
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 171
-; CHECK-RV64-NEXT:    li a3, 170
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_694
-; CHECK-RV64-NEXT:    j .LBB61_176
-; CHECK-RV64-NEXT:  .LBB61_694: # %cond.load681
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 172
-; CHECK-RV64-NEXT:    li a3, 171
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_695
-; CHECK-RV64-NEXT:    j .LBB61_177
-; CHECK-RV64-NEXT:  .LBB61_695: # %cond.load685
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 173
-; CHECK-RV64-NEXT:    li a3, 172
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_696
-; CHECK-RV64-NEXT:    j .LBB61_178
-; CHECK-RV64-NEXT:  .LBB61_696: # %cond.load689
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 174
-; CHECK-RV64-NEXT:    li a3, 173
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_697
-; CHECK-RV64-NEXT:    j .LBB61_179
-; CHECK-RV64-NEXT:  .LBB61_697: # %cond.load693
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 175
-; CHECK-RV64-NEXT:    li a3, 174
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_698
-; CHECK-RV64-NEXT:    j .LBB61_180
-; CHECK-RV64-NEXT:  .LBB61_698: # %cond.load697
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 176
-; CHECK-RV64-NEXT:    li a3, 175
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_699
-; CHECK-RV64-NEXT:    j .LBB61_181
-; CHECK-RV64-NEXT:  .LBB61_699: # %cond.load701
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 177
-; CHECK-RV64-NEXT:    li a3, 176
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_700
-; CHECK-RV64-NEXT:    j .LBB61_182
-; CHECK-RV64-NEXT:  .LBB61_700: # %cond.load705
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 178
-; CHECK-RV64-NEXT:    li a3, 177
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_701
-; CHECK-RV64-NEXT:    j .LBB61_183
-; CHECK-RV64-NEXT:  .LBB61_701: # %cond.load709
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 179
-; CHECK-RV64-NEXT:    li a3, 178
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_702
-; CHECK-RV64-NEXT:    j .LBB61_184
-; CHECK-RV64-NEXT:  .LBB61_702: # %cond.load713
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 180
-; CHECK-RV64-NEXT:    li a3, 179
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_703
-; CHECK-RV64-NEXT:    j .LBB61_185
-; CHECK-RV64-NEXT:  .LBB61_703: # %cond.load717
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 181
-; CHECK-RV64-NEXT:    li a3, 180
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_704
-; CHECK-RV64-NEXT:    j .LBB61_186
-; CHECK-RV64-NEXT:  .LBB61_704: # %cond.load721
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 182
-; CHECK-RV64-NEXT:    li a3, 181
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_705
-; CHECK-RV64-NEXT:    j .LBB61_187
-; CHECK-RV64-NEXT:  .LBB61_705: # %cond.load725
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 183
-; CHECK-RV64-NEXT:    li a3, 182
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_706
-; CHECK-RV64-NEXT:    j .LBB61_188
-; CHECK-RV64-NEXT:  .LBB61_706: # %cond.load729
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 184
-; CHECK-RV64-NEXT:    li a3, 183
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_707
-; CHECK-RV64-NEXT:    j .LBB61_189
-; CHECK-RV64-NEXT:  .LBB61_707: # %cond.load733
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 185
-; CHECK-RV64-NEXT:    li a3, 184
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_708
-; CHECK-RV64-NEXT:    j .LBB61_190
-; CHECK-RV64-NEXT:  .LBB61_708: # %cond.load737
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 186
-; CHECK-RV64-NEXT:    li a3, 185
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_709
-; CHECK-RV64-NEXT:    j .LBB61_191
-; CHECK-RV64-NEXT:  .LBB61_709: # %cond.load741
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 187
-; CHECK-RV64-NEXT:    li a3, 186
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_710
-; CHECK-RV64-NEXT:    j .LBB61_192
-; CHECK-RV64-NEXT:  .LBB61_710: # %cond.load745
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 188
-; CHECK-RV64-NEXT:    li a3, 187
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_711
-; CHECK-RV64-NEXT:    j .LBB61_193
-; CHECK-RV64-NEXT:  .LBB61_711: # %cond.load749
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 189
-; CHECK-RV64-NEXT:    li a3, 188
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_1027
-; CHECK-RV64-NEXT:    j .LBB61_194
-; CHECK-RV64-NEXT:  .LBB61_1027: # %cond.load749
-; CHECK-RV64-NEXT:    j .LBB61_195
-; CHECK-RV64-NEXT:  .LBB61_712: # %cond.load761
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 192
-; CHECK-RV64-NEXT:    li a3, 191
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_713
-; CHECK-RV64-NEXT:    j .LBB61_199
-; CHECK-RV64-NEXT:  .LBB61_713: # %cond.load765
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 193
-; CHECK-RV64-NEXT:    li a3, 192
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_714
-; CHECK-RV64-NEXT:    j .LBB61_200
-; CHECK-RV64-NEXT:  .LBB61_714: # %cond.load769
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 194
-; CHECK-RV64-NEXT:    li a3, 193
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_715
-; CHECK-RV64-NEXT:    j .LBB61_201
-; CHECK-RV64-NEXT:  .LBB61_715: # %cond.load773
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 195
-; CHECK-RV64-NEXT:    li a3, 194
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_716
-; CHECK-RV64-NEXT:    j .LBB61_202
-; CHECK-RV64-NEXT:  .LBB61_716: # %cond.load777
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 196
-; CHECK-RV64-NEXT:    li a3, 195
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_717
-; CHECK-RV64-NEXT:    j .LBB61_203
-; CHECK-RV64-NEXT:  .LBB61_717: # %cond.load781
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 197
-; CHECK-RV64-NEXT:    li a3, 196
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_718
-; CHECK-RV64-NEXT:    j .LBB61_204
-; CHECK-RV64-NEXT:  .LBB61_718: # %cond.load785
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 198
-; CHECK-RV64-NEXT:    li a3, 197
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_719
-; CHECK-RV64-NEXT:    j .LBB61_205
-; CHECK-RV64-NEXT:  .LBB61_719: # %cond.load789
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 199
-; CHECK-RV64-NEXT:    li a3, 198
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_720
-; CHECK-RV64-NEXT:    j .LBB61_206
-; CHECK-RV64-NEXT:  .LBB61_720: # %cond.load793
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 200
-; CHECK-RV64-NEXT:    li a3, 199
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_721
-; CHECK-RV64-NEXT:    j .LBB61_207
-; CHECK-RV64-NEXT:  .LBB61_721: # %cond.load797
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 201
-; CHECK-RV64-NEXT:    li a3, 200
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_722
-; CHECK-RV64-NEXT:    j .LBB61_208
-; CHECK-RV64-NEXT:  .LBB61_722: # %cond.load801
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 202
-; CHECK-RV64-NEXT:    li a3, 201
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_723
-; CHECK-RV64-NEXT:    j .LBB61_209
-; CHECK-RV64-NEXT:  .LBB61_723: # %cond.load805
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 203
-; CHECK-RV64-NEXT:    li a3, 202
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_724
-; CHECK-RV64-NEXT:    j .LBB61_210
-; CHECK-RV64-NEXT:  .LBB61_724: # %cond.load809
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 204
-; CHECK-RV64-NEXT:    li a3, 203
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_725
-; CHECK-RV64-NEXT:    j .LBB61_211
-; CHECK-RV64-NEXT:  .LBB61_725: # %cond.load813
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 205
-; CHECK-RV64-NEXT:    li a3, 204
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_726
-; CHECK-RV64-NEXT:    j .LBB61_212
-; CHECK-RV64-NEXT:  .LBB61_726: # %cond.load817
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 206
-; CHECK-RV64-NEXT:    li a3, 205
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_727
-; CHECK-RV64-NEXT:    j .LBB61_213
-; CHECK-RV64-NEXT:  .LBB61_727: # %cond.load821
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 207
-; CHECK-RV64-NEXT:    li a3, 206
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_728
-; CHECK-RV64-NEXT:    j .LBB61_214
-; CHECK-RV64-NEXT:  .LBB61_728: # %cond.load825
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 208
-; CHECK-RV64-NEXT:    li a3, 207
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_729
-; CHECK-RV64-NEXT:    j .LBB61_215
-; CHECK-RV64-NEXT:  .LBB61_729: # %cond.load829
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 209
-; CHECK-RV64-NEXT:    li a3, 208
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_730
-; CHECK-RV64-NEXT:    j .LBB61_216
-; CHECK-RV64-NEXT:  .LBB61_730: # %cond.load833
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 210
-; CHECK-RV64-NEXT:    li a3, 209
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_731
-; CHECK-RV64-NEXT:    j .LBB61_217
-; CHECK-RV64-NEXT:  .LBB61_731: # %cond.load837
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 211
-; CHECK-RV64-NEXT:    li a3, 210
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_732
-; CHECK-RV64-NEXT:    j .LBB61_218
-; CHECK-RV64-NEXT:  .LBB61_732: # %cond.load841
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 212
-; CHECK-RV64-NEXT:    li a3, 211
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_733
-; CHECK-RV64-NEXT:    j .LBB61_219
-; CHECK-RV64-NEXT:  .LBB61_733: # %cond.load845
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 213
-; CHECK-RV64-NEXT:    li a3, 212
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_734
-; CHECK-RV64-NEXT:    j .LBB61_220
-; CHECK-RV64-NEXT:  .LBB61_734: # %cond.load849
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 214
-; CHECK-RV64-NEXT:    li a3, 213
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_735
-; CHECK-RV64-NEXT:    j .LBB61_221
-; CHECK-RV64-NEXT:  .LBB61_735: # %cond.load853
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 215
-; CHECK-RV64-NEXT:    li a3, 214
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_736
-; CHECK-RV64-NEXT:    j .LBB61_222
-; CHECK-RV64-NEXT:  .LBB61_736: # %cond.load857
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 216
-; CHECK-RV64-NEXT:    li a3, 215
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_737
-; CHECK-RV64-NEXT:    j .LBB61_223
-; CHECK-RV64-NEXT:  .LBB61_737: # %cond.load861
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 217
-; CHECK-RV64-NEXT:    li a3, 216
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_738
-; CHECK-RV64-NEXT:    j .LBB61_224
-; CHECK-RV64-NEXT:  .LBB61_738: # %cond.load865
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 218
-; CHECK-RV64-NEXT:    li a3, 217
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_739
-; CHECK-RV64-NEXT:    j .LBB61_225
-; CHECK-RV64-NEXT:  .LBB61_739: # %cond.load869
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 219
-; CHECK-RV64-NEXT:    li a3, 218
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_740
-; CHECK-RV64-NEXT:    j .LBB61_226
-; CHECK-RV64-NEXT:  .LBB61_740: # %cond.load873
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 220
-; CHECK-RV64-NEXT:    li a3, 219
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_741
-; CHECK-RV64-NEXT:    j .LBB61_227
-; CHECK-RV64-NEXT:  .LBB61_741: # %cond.load877
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 221
-; CHECK-RV64-NEXT:    li a3, 220
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_742
-; CHECK-RV64-NEXT:    j .LBB61_228
-; CHECK-RV64-NEXT:  .LBB61_742: # %cond.load881
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 222
-; CHECK-RV64-NEXT:    li a3, 221
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_743
-; CHECK-RV64-NEXT:    j .LBB61_229
-; CHECK-RV64-NEXT:  .LBB61_743: # %cond.load885
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 223
-; CHECK-RV64-NEXT:    li a3, 222
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_744
-; CHECK-RV64-NEXT:    j .LBB61_230
-; CHECK-RV64-NEXT:  .LBB61_744: # %cond.load889
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 224
-; CHECK-RV64-NEXT:    li a3, 223
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_745
-; CHECK-RV64-NEXT:    j .LBB61_231
-; CHECK-RV64-NEXT:  .LBB61_745: # %cond.load893
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 225
-; CHECK-RV64-NEXT:    li a3, 224
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_746
-; CHECK-RV64-NEXT:    j .LBB61_232
-; CHECK-RV64-NEXT:  .LBB61_746: # %cond.load897
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 226
-; CHECK-RV64-NEXT:    li a3, 225
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_747
-; CHECK-RV64-NEXT:    j .LBB61_233
-; CHECK-RV64-NEXT:  .LBB61_747: # %cond.load901
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 227
-; CHECK-RV64-NEXT:    li a3, 226
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_748
-; CHECK-RV64-NEXT:    j .LBB61_234
-; CHECK-RV64-NEXT:  .LBB61_748: # %cond.load905
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 228
-; CHECK-RV64-NEXT:    li a3, 227
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_749
-; CHECK-RV64-NEXT:    j .LBB61_235
-; CHECK-RV64-NEXT:  .LBB61_749: # %cond.load909
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 229
-; CHECK-RV64-NEXT:    li a3, 228
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_750
-; CHECK-RV64-NEXT:    j .LBB61_236
-; CHECK-RV64-NEXT:  .LBB61_750: # %cond.load913
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 230
-; CHECK-RV64-NEXT:    li a3, 229
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_751
-; CHECK-RV64-NEXT:    j .LBB61_237
-; CHECK-RV64-NEXT:  .LBB61_751: # %cond.load917
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 231
-; CHECK-RV64-NEXT:    li a3, 230
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_752
-; CHECK-RV64-NEXT:    j .LBB61_238
-; CHECK-RV64-NEXT:  .LBB61_752: # %cond.load921
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 232
-; CHECK-RV64-NEXT:    li a3, 231
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_753
-; CHECK-RV64-NEXT:    j .LBB61_239
-; CHECK-RV64-NEXT:  .LBB61_753: # %cond.load925
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 233
-; CHECK-RV64-NEXT:    li a3, 232
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_754
-; CHECK-RV64-NEXT:    j .LBB61_240
-; CHECK-RV64-NEXT:  .LBB61_754: # %cond.load929
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 234
-; CHECK-RV64-NEXT:    li a3, 233
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_755
-; CHECK-RV64-NEXT:    j .LBB61_241
-; CHECK-RV64-NEXT:  .LBB61_755: # %cond.load933
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 235
-; CHECK-RV64-NEXT:    li a3, 234
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_756
-; CHECK-RV64-NEXT:    j .LBB61_242
-; CHECK-RV64-NEXT:  .LBB61_756: # %cond.load937
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 236
-; CHECK-RV64-NEXT:    li a3, 235
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_757
-; CHECK-RV64-NEXT:    j .LBB61_243
-; CHECK-RV64-NEXT:  .LBB61_757: # %cond.load941
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 237
-; CHECK-RV64-NEXT:    li a3, 236
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_758
-; CHECK-RV64-NEXT:    j .LBB61_244
-; CHECK-RV64-NEXT:  .LBB61_758: # %cond.load945
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 238
-; CHECK-RV64-NEXT:    li a3, 237
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_759
-; CHECK-RV64-NEXT:    j .LBB61_245
-; CHECK-RV64-NEXT:  .LBB61_759: # %cond.load949
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 239
-; CHECK-RV64-NEXT:    li a3, 238
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_760
-; CHECK-RV64-NEXT:    j .LBB61_246
-; CHECK-RV64-NEXT:  .LBB61_760: # %cond.load953
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 240
-; CHECK-RV64-NEXT:    li a3, 239
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_761
-; CHECK-RV64-NEXT:    j .LBB61_247
-; CHECK-RV64-NEXT:  .LBB61_761: # %cond.load957
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 241
-; CHECK-RV64-NEXT:    li a3, 240
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_762
-; CHECK-RV64-NEXT:    j .LBB61_248
-; CHECK-RV64-NEXT:  .LBB61_762: # %cond.load961
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 242
-; CHECK-RV64-NEXT:    li a3, 241
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_763
-; CHECK-RV64-NEXT:    j .LBB61_249
-; CHECK-RV64-NEXT:  .LBB61_763: # %cond.load965
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 243
-; CHECK-RV64-NEXT:    li a3, 242
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_764
-; CHECK-RV64-NEXT:    j .LBB61_250
-; CHECK-RV64-NEXT:  .LBB61_764: # %cond.load969
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 244
-; CHECK-RV64-NEXT:    li a3, 243
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_765
-; CHECK-RV64-NEXT:    j .LBB61_251
-; CHECK-RV64-NEXT:  .LBB61_765: # %cond.load973
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 245
-; CHECK-RV64-NEXT:    li a3, 244
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_766
-; CHECK-RV64-NEXT:    j .LBB61_252
-; CHECK-RV64-NEXT:  .LBB61_766: # %cond.load977
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 246
-; CHECK-RV64-NEXT:    li a3, 245
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_767
-; CHECK-RV64-NEXT:    j .LBB61_253
-; CHECK-RV64-NEXT:  .LBB61_767: # %cond.load981
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 247
-; CHECK-RV64-NEXT:    li a3, 246
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_768
-; CHECK-RV64-NEXT:    j .LBB61_254
-; CHECK-RV64-NEXT:  .LBB61_768: # %cond.load985
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 248
-; CHECK-RV64-NEXT:    li a3, 247
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_769
-; CHECK-RV64-NEXT:    j .LBB61_255
-; CHECK-RV64-NEXT:  .LBB61_769: # %cond.load989
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 249
-; CHECK-RV64-NEXT:    li a3, 248
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_770
-; CHECK-RV64-NEXT:    j .LBB61_256
-; CHECK-RV64-NEXT:  .LBB61_770: # %cond.load993
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 250
-; CHECK-RV64-NEXT:    li a3, 249
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_771
-; CHECK-RV64-NEXT:    j .LBB61_257
-; CHECK-RV64-NEXT:  .LBB61_771: # %cond.load997
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 251
-; CHECK-RV64-NEXT:    li a3, 250
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_772
-; CHECK-RV64-NEXT:    j .LBB61_258
-; CHECK-RV64-NEXT:  .LBB61_772: # %cond.load1001
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 252
-; CHECK-RV64-NEXT:    li a3, 251
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_773
-; CHECK-RV64-NEXT:    j .LBB61_259
-; CHECK-RV64-NEXT:  .LBB61_773: # %cond.load1005
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a2, 253
-; CHECK-RV64-NEXT:    li a3, 252
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_1028
-; CHECK-RV64-NEXT:    j .LBB61_260
-; CHECK-RV64-NEXT:  .LBB61_1028: # %cond.load1005
-; CHECK-RV64-NEXT:    j .LBB61_261
-; CHECK-RV64-NEXT:  .LBB61_774: # %cond.load1017
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-RV64-NEXT:    li a1, 256
-; CHECK-RV64-NEXT:    li a3, 255
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_775
-; CHECK-RV64-NEXT:    j .LBB61_265
-; CHECK-RV64-NEXT:  .LBB61_775: # %cond.load1021
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 257
-; CHECK-RV64-NEXT:    li a3, 256
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_776
-; CHECK-RV64-NEXT:    j .LBB61_266
-; CHECK-RV64-NEXT:  .LBB61_776: # %cond.load1025
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 258
-; CHECK-RV64-NEXT:    li a3, 257
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_777
-; CHECK-RV64-NEXT:    j .LBB61_267
-; CHECK-RV64-NEXT:  .LBB61_777: # %cond.load1029
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 259
-; CHECK-RV64-NEXT:    li a3, 258
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_778
-; CHECK-RV64-NEXT:    j .LBB61_268
-; CHECK-RV64-NEXT:  .LBB61_778: # %cond.load1033
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 260
-; CHECK-RV64-NEXT:    li a3, 259
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_779
-; CHECK-RV64-NEXT:    j .LBB61_269
-; CHECK-RV64-NEXT:  .LBB61_779: # %cond.load1037
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 261
-; CHECK-RV64-NEXT:    li a3, 260
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_780
-; CHECK-RV64-NEXT:    j .LBB61_270
-; CHECK-RV64-NEXT:  .LBB61_780: # %cond.load1041
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 262
-; CHECK-RV64-NEXT:    li a3, 261
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_781
-; CHECK-RV64-NEXT:    j .LBB61_271
-; CHECK-RV64-NEXT:  .LBB61_781: # %cond.load1045
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 263
-; CHECK-RV64-NEXT:    li a3, 262
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_782
-; CHECK-RV64-NEXT:    j .LBB61_272
-; CHECK-RV64-NEXT:  .LBB61_782: # %cond.load1049
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 264
-; CHECK-RV64-NEXT:    li a3, 263
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_783
-; CHECK-RV64-NEXT:    j .LBB61_273
-; CHECK-RV64-NEXT:  .LBB61_783: # %cond.load1053
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 265
-; CHECK-RV64-NEXT:    li a3, 264
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_784
-; CHECK-RV64-NEXT:    j .LBB61_274
-; CHECK-RV64-NEXT:  .LBB61_784: # %cond.load1057
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 266
-; CHECK-RV64-NEXT:    li a3, 265
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_785
-; CHECK-RV64-NEXT:    j .LBB61_275
-; CHECK-RV64-NEXT:  .LBB61_785: # %cond.load1061
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 267
-; CHECK-RV64-NEXT:    li a3, 266
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_786
-; CHECK-RV64-NEXT:    j .LBB61_276
-; CHECK-RV64-NEXT:  .LBB61_786: # %cond.load1065
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 268
-; CHECK-RV64-NEXT:    li a3, 267
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_787
-; CHECK-RV64-NEXT:    j .LBB61_277
-; CHECK-RV64-NEXT:  .LBB61_787: # %cond.load1069
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 269
-; CHECK-RV64-NEXT:    li a3, 268
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_788
-; CHECK-RV64-NEXT:    j .LBB61_278
-; CHECK-RV64-NEXT:  .LBB61_788: # %cond.load1073
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 270
-; CHECK-RV64-NEXT:    li a3, 269
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_789
-; CHECK-RV64-NEXT:    j .LBB61_279
-; CHECK-RV64-NEXT:  .LBB61_789: # %cond.load1077
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 271
-; CHECK-RV64-NEXT:    li a3, 270
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_790
-; CHECK-RV64-NEXT:    j .LBB61_280
-; CHECK-RV64-NEXT:  .LBB61_790: # %cond.load1081
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 272
-; CHECK-RV64-NEXT:    li a3, 271
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_791
-; CHECK-RV64-NEXT:    j .LBB61_281
-; CHECK-RV64-NEXT:  .LBB61_791: # %cond.load1085
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 273
-; CHECK-RV64-NEXT:    li a3, 272
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_792
-; CHECK-RV64-NEXT:    j .LBB61_282
-; CHECK-RV64-NEXT:  .LBB61_792: # %cond.load1089
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 274
-; CHECK-RV64-NEXT:    li a3, 273
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_793
-; CHECK-RV64-NEXT:    j .LBB61_283
-; CHECK-RV64-NEXT:  .LBB61_793: # %cond.load1093
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 275
-; CHECK-RV64-NEXT:    li a3, 274
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_794
-; CHECK-RV64-NEXT:    j .LBB61_284
-; CHECK-RV64-NEXT:  .LBB61_794: # %cond.load1097
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 276
-; CHECK-RV64-NEXT:    li a3, 275
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_795
-; CHECK-RV64-NEXT:    j .LBB61_285
-; CHECK-RV64-NEXT:  .LBB61_795: # %cond.load1101
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 277
-; CHECK-RV64-NEXT:    li a3, 276
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_796
-; CHECK-RV64-NEXT:    j .LBB61_286
-; CHECK-RV64-NEXT:  .LBB61_796: # %cond.load1105
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 278
-; CHECK-RV64-NEXT:    li a3, 277
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_797
-; CHECK-RV64-NEXT:    j .LBB61_287
-; CHECK-RV64-NEXT:  .LBB61_797: # %cond.load1109
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 279
-; CHECK-RV64-NEXT:    li a3, 278
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_798
-; CHECK-RV64-NEXT:    j .LBB61_288
-; CHECK-RV64-NEXT:  .LBB61_798: # %cond.load1113
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 280
-; CHECK-RV64-NEXT:    li a3, 279
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_799
-; CHECK-RV64-NEXT:    j .LBB61_289
-; CHECK-RV64-NEXT:  .LBB61_799: # %cond.load1117
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 281
-; CHECK-RV64-NEXT:    li a3, 280
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_800
-; CHECK-RV64-NEXT:    j .LBB61_290
-; CHECK-RV64-NEXT:  .LBB61_800: # %cond.load1121
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 282
-; CHECK-RV64-NEXT:    li a3, 281
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_801
-; CHECK-RV64-NEXT:    j .LBB61_291
-; CHECK-RV64-NEXT:  .LBB61_801: # %cond.load1125
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 283
-; CHECK-RV64-NEXT:    li a3, 282
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_802
-; CHECK-RV64-NEXT:    j .LBB61_292
-; CHECK-RV64-NEXT:  .LBB61_802: # %cond.load1129
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 284
-; CHECK-RV64-NEXT:    li a3, 283
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_803
-; CHECK-RV64-NEXT:    j .LBB61_293
-; CHECK-RV64-NEXT:  .LBB61_803: # %cond.load1133
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 285
-; CHECK-RV64-NEXT:    li a3, 284
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_804
-; CHECK-RV64-NEXT:    j .LBB61_294
-; CHECK-RV64-NEXT:  .LBB61_804: # %cond.load1137
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 286
-; CHECK-RV64-NEXT:    li a3, 285
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_805
-; CHECK-RV64-NEXT:    j .LBB61_295
-; CHECK-RV64-NEXT:  .LBB61_805: # %cond.load1141
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 287
-; CHECK-RV64-NEXT:    li a3, 286
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_806
-; CHECK-RV64-NEXT:    j .LBB61_296
-; CHECK-RV64-NEXT:  .LBB61_806: # %cond.load1145
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 288
-; CHECK-RV64-NEXT:    li a3, 287
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_807
-; CHECK-RV64-NEXT:    j .LBB61_297
-; CHECK-RV64-NEXT:  .LBB61_807: # %cond.load1149
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 289
-; CHECK-RV64-NEXT:    li a3, 288
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_808
-; CHECK-RV64-NEXT:    j .LBB61_298
-; CHECK-RV64-NEXT:  .LBB61_808: # %cond.load1153
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 290
-; CHECK-RV64-NEXT:    li a3, 289
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_809
-; CHECK-RV64-NEXT:    j .LBB61_299
-; CHECK-RV64-NEXT:  .LBB61_809: # %cond.load1157
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 291
-; CHECK-RV64-NEXT:    li a3, 290
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_810
-; CHECK-RV64-NEXT:    j .LBB61_300
-; CHECK-RV64-NEXT:  .LBB61_810: # %cond.load1161
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 292
-; CHECK-RV64-NEXT:    li a3, 291
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_811
-; CHECK-RV64-NEXT:    j .LBB61_301
-; CHECK-RV64-NEXT:  .LBB61_811: # %cond.load1165
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 293
-; CHECK-RV64-NEXT:    li a3, 292
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_812
-; CHECK-RV64-NEXT:    j .LBB61_302
-; CHECK-RV64-NEXT:  .LBB61_812: # %cond.load1169
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 294
-; CHECK-RV64-NEXT:    li a3, 293
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_813
-; CHECK-RV64-NEXT:    j .LBB61_303
-; CHECK-RV64-NEXT:  .LBB61_813: # %cond.load1173
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 295
-; CHECK-RV64-NEXT:    li a3, 294
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_814
-; CHECK-RV64-NEXT:    j .LBB61_304
-; CHECK-RV64-NEXT:  .LBB61_814: # %cond.load1177
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 296
-; CHECK-RV64-NEXT:    li a3, 295
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_815
-; CHECK-RV64-NEXT:    j .LBB61_305
-; CHECK-RV64-NEXT:  .LBB61_815: # %cond.load1181
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 297
-; CHECK-RV64-NEXT:    li a3, 296
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_816
-; CHECK-RV64-NEXT:    j .LBB61_306
-; CHECK-RV64-NEXT:  .LBB61_816: # %cond.load1185
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 298
-; CHECK-RV64-NEXT:    li a3, 297
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_817
-; CHECK-RV64-NEXT:    j .LBB61_307
-; CHECK-RV64-NEXT:  .LBB61_817: # %cond.load1189
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 299
-; CHECK-RV64-NEXT:    li a3, 298
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_818
-; CHECK-RV64-NEXT:    j .LBB61_308
-; CHECK-RV64-NEXT:  .LBB61_818: # %cond.load1193
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 300
-; CHECK-RV64-NEXT:    li a3, 299
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_819
-; CHECK-RV64-NEXT:    j .LBB61_309
-; CHECK-RV64-NEXT:  .LBB61_819: # %cond.load1197
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 301
-; CHECK-RV64-NEXT:    li a3, 300
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_820
-; CHECK-RV64-NEXT:    j .LBB61_310
-; CHECK-RV64-NEXT:  .LBB61_820: # %cond.load1201
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 302
-; CHECK-RV64-NEXT:    li a3, 301
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_821
-; CHECK-RV64-NEXT:    j .LBB61_311
-; CHECK-RV64-NEXT:  .LBB61_821: # %cond.load1205
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 303
-; CHECK-RV64-NEXT:    li a3, 302
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_822
-; CHECK-RV64-NEXT:    j .LBB61_312
-; CHECK-RV64-NEXT:  .LBB61_822: # %cond.load1209
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 304
-; CHECK-RV64-NEXT:    li a3, 303
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_823
-; CHECK-RV64-NEXT:    j .LBB61_313
-; CHECK-RV64-NEXT:  .LBB61_823: # %cond.load1213
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 305
-; CHECK-RV64-NEXT:    li a3, 304
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_824
-; CHECK-RV64-NEXT:    j .LBB61_314
-; CHECK-RV64-NEXT:  .LBB61_824: # %cond.load1217
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 306
-; CHECK-RV64-NEXT:    li a3, 305
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_825
-; CHECK-RV64-NEXT:    j .LBB61_315
-; CHECK-RV64-NEXT:  .LBB61_825: # %cond.load1221
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 307
-; CHECK-RV64-NEXT:    li a3, 306
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_826
-; CHECK-RV64-NEXT:    j .LBB61_316
-; CHECK-RV64-NEXT:  .LBB61_826: # %cond.load1225
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 308
-; CHECK-RV64-NEXT:    li a3, 307
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_827
-; CHECK-RV64-NEXT:    j .LBB61_317
-; CHECK-RV64-NEXT:  .LBB61_827: # %cond.load1229
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 309
-; CHECK-RV64-NEXT:    li a3, 308
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_828
-; CHECK-RV64-NEXT:    j .LBB61_318
-; CHECK-RV64-NEXT:  .LBB61_828: # %cond.load1233
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 310
-; CHECK-RV64-NEXT:    li a3, 309
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_829
-; CHECK-RV64-NEXT:    j .LBB61_319
-; CHECK-RV64-NEXT:  .LBB61_829: # %cond.load1237
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 311
-; CHECK-RV64-NEXT:    li a3, 310
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_830
-; CHECK-RV64-NEXT:    j .LBB61_320
-; CHECK-RV64-NEXT:  .LBB61_830: # %cond.load1241
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 312
-; CHECK-RV64-NEXT:    li a3, 311
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_831
-; CHECK-RV64-NEXT:    j .LBB61_321
-; CHECK-RV64-NEXT:  .LBB61_831: # %cond.load1245
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 313
-; CHECK-RV64-NEXT:    li a3, 312
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_832
-; CHECK-RV64-NEXT:    j .LBB61_322
-; CHECK-RV64-NEXT:  .LBB61_832: # %cond.load1249
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 314
-; CHECK-RV64-NEXT:    li a3, 313
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_833
-; CHECK-RV64-NEXT:    j .LBB61_323
-; CHECK-RV64-NEXT:  .LBB61_833: # %cond.load1253
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 315
-; CHECK-RV64-NEXT:    li a3, 314
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_834
-; CHECK-RV64-NEXT:    j .LBB61_324
-; CHECK-RV64-NEXT:  .LBB61_834: # %cond.load1257
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 316
-; CHECK-RV64-NEXT:    li a3, 315
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_835
-; CHECK-RV64-NEXT:    j .LBB61_325
-; CHECK-RV64-NEXT:  .LBB61_835: # %cond.load1261
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 317
-; CHECK-RV64-NEXT:    li a3, 316
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_1029
-; CHECK-RV64-NEXT:    j .LBB61_326
-; CHECK-RV64-NEXT:  .LBB61_1029: # %cond.load1261
-; CHECK-RV64-NEXT:    j .LBB61_327
-; CHECK-RV64-NEXT:  .LBB61_836: # %cond.load1273
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 320
-; CHECK-RV64-NEXT:    li a3, 319
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_837
-; CHECK-RV64-NEXT:    j .LBB61_331
-; CHECK-RV64-NEXT:  .LBB61_837: # %cond.load1277
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 321
-; CHECK-RV64-NEXT:    li a3, 320
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_838
-; CHECK-RV64-NEXT:    j .LBB61_332
-; CHECK-RV64-NEXT:  .LBB61_838: # %cond.load1281
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 322
-; CHECK-RV64-NEXT:    li a3, 321
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_839
-; CHECK-RV64-NEXT:    j .LBB61_333
-; CHECK-RV64-NEXT:  .LBB61_839: # %cond.load1285
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 323
-; CHECK-RV64-NEXT:    li a3, 322
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_840
-; CHECK-RV64-NEXT:    j .LBB61_334
-; CHECK-RV64-NEXT:  .LBB61_840: # %cond.load1289
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 324
-; CHECK-RV64-NEXT:    li a3, 323
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_841
-; CHECK-RV64-NEXT:    j .LBB61_335
-; CHECK-RV64-NEXT:  .LBB61_841: # %cond.load1293
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 325
-; CHECK-RV64-NEXT:    li a3, 324
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_842
-; CHECK-RV64-NEXT:    j .LBB61_336
-; CHECK-RV64-NEXT:  .LBB61_842: # %cond.load1297
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 326
-; CHECK-RV64-NEXT:    li a3, 325
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_843
-; CHECK-RV64-NEXT:    j .LBB61_337
-; CHECK-RV64-NEXT:  .LBB61_843: # %cond.load1301
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 327
-; CHECK-RV64-NEXT:    li a3, 326
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_844
-; CHECK-RV64-NEXT:    j .LBB61_338
-; CHECK-RV64-NEXT:  .LBB61_844: # %cond.load1305
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 328
-; CHECK-RV64-NEXT:    li a3, 327
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_845
-; CHECK-RV64-NEXT:    j .LBB61_339
-; CHECK-RV64-NEXT:  .LBB61_845: # %cond.load1309
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 329
-; CHECK-RV64-NEXT:    li a3, 328
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_846
-; CHECK-RV64-NEXT:    j .LBB61_340
-; CHECK-RV64-NEXT:  .LBB61_846: # %cond.load1313
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 330
-; CHECK-RV64-NEXT:    li a3, 329
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_847
-; CHECK-RV64-NEXT:    j .LBB61_341
-; CHECK-RV64-NEXT:  .LBB61_847: # %cond.load1317
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 331
-; CHECK-RV64-NEXT:    li a3, 330
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_848
-; CHECK-RV64-NEXT:    j .LBB61_342
-; CHECK-RV64-NEXT:  .LBB61_848: # %cond.load1321
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 332
-; CHECK-RV64-NEXT:    li a3, 331
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_849
-; CHECK-RV64-NEXT:    j .LBB61_343
-; CHECK-RV64-NEXT:  .LBB61_849: # %cond.load1325
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 333
-; CHECK-RV64-NEXT:    li a3, 332
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_850
-; CHECK-RV64-NEXT:    j .LBB61_344
-; CHECK-RV64-NEXT:  .LBB61_850: # %cond.load1329
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 334
-; CHECK-RV64-NEXT:    li a3, 333
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_851
-; CHECK-RV64-NEXT:    j .LBB61_345
-; CHECK-RV64-NEXT:  .LBB61_851: # %cond.load1333
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 335
-; CHECK-RV64-NEXT:    li a3, 334
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_852
-; CHECK-RV64-NEXT:    j .LBB61_346
-; CHECK-RV64-NEXT:  .LBB61_852: # %cond.load1337
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 336
-; CHECK-RV64-NEXT:    li a3, 335
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_853
-; CHECK-RV64-NEXT:    j .LBB61_347
-; CHECK-RV64-NEXT:  .LBB61_853: # %cond.load1341
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 337
-; CHECK-RV64-NEXT:    li a3, 336
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_854
-; CHECK-RV64-NEXT:    j .LBB61_348
-; CHECK-RV64-NEXT:  .LBB61_854: # %cond.load1345
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 338
-; CHECK-RV64-NEXT:    li a3, 337
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_855
-; CHECK-RV64-NEXT:    j .LBB61_349
-; CHECK-RV64-NEXT:  .LBB61_855: # %cond.load1349
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 339
-; CHECK-RV64-NEXT:    li a3, 338
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_856
-; CHECK-RV64-NEXT:    j .LBB61_350
-; CHECK-RV64-NEXT:  .LBB61_856: # %cond.load1353
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 340
-; CHECK-RV64-NEXT:    li a3, 339
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_857
-; CHECK-RV64-NEXT:    j .LBB61_351
-; CHECK-RV64-NEXT:  .LBB61_857: # %cond.load1357
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 341
-; CHECK-RV64-NEXT:    li a3, 340
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_858
-; CHECK-RV64-NEXT:    j .LBB61_352
-; CHECK-RV64-NEXT:  .LBB61_858: # %cond.load1361
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 342
-; CHECK-RV64-NEXT:    li a3, 341
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_859
-; CHECK-RV64-NEXT:    j .LBB61_353
-; CHECK-RV64-NEXT:  .LBB61_859: # %cond.load1365
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 343
-; CHECK-RV64-NEXT:    li a3, 342
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_860
-; CHECK-RV64-NEXT:    j .LBB61_354
-; CHECK-RV64-NEXT:  .LBB61_860: # %cond.load1369
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 344
-; CHECK-RV64-NEXT:    li a3, 343
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_861
-; CHECK-RV64-NEXT:    j .LBB61_355
-; CHECK-RV64-NEXT:  .LBB61_861: # %cond.load1373
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 345
-; CHECK-RV64-NEXT:    li a3, 344
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_862
-; CHECK-RV64-NEXT:    j .LBB61_356
-; CHECK-RV64-NEXT:  .LBB61_862: # %cond.load1377
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 346
-; CHECK-RV64-NEXT:    li a3, 345
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_863
-; CHECK-RV64-NEXT:    j .LBB61_357
-; CHECK-RV64-NEXT:  .LBB61_863: # %cond.load1381
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 347
-; CHECK-RV64-NEXT:    li a3, 346
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_864
-; CHECK-RV64-NEXT:    j .LBB61_358
-; CHECK-RV64-NEXT:  .LBB61_864: # %cond.load1385
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 348
-; CHECK-RV64-NEXT:    li a3, 347
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_865
-; CHECK-RV64-NEXT:    j .LBB61_359
-; CHECK-RV64-NEXT:  .LBB61_865: # %cond.load1389
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 349
-; CHECK-RV64-NEXT:    li a3, 348
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_866
-; CHECK-RV64-NEXT:    j .LBB61_360
-; CHECK-RV64-NEXT:  .LBB61_866: # %cond.load1393
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 350
-; CHECK-RV64-NEXT:    li a3, 349
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_867
-; CHECK-RV64-NEXT:    j .LBB61_361
-; CHECK-RV64-NEXT:  .LBB61_867: # %cond.load1397
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 351
-; CHECK-RV64-NEXT:    li a3, 350
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_868
-; CHECK-RV64-NEXT:    j .LBB61_362
-; CHECK-RV64-NEXT:  .LBB61_868: # %cond.load1401
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 352
-; CHECK-RV64-NEXT:    li a3, 351
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_869
-; CHECK-RV64-NEXT:    j .LBB61_363
-; CHECK-RV64-NEXT:  .LBB61_869: # %cond.load1405
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 353
-; CHECK-RV64-NEXT:    li a3, 352
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_870
-; CHECK-RV64-NEXT:    j .LBB61_364
-; CHECK-RV64-NEXT:  .LBB61_870: # %cond.load1409
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 354
-; CHECK-RV64-NEXT:    li a3, 353
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_871
-; CHECK-RV64-NEXT:    j .LBB61_365
-; CHECK-RV64-NEXT:  .LBB61_871: # %cond.load1413
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 355
-; CHECK-RV64-NEXT:    li a3, 354
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_872
-; CHECK-RV64-NEXT:    j .LBB61_366
-; CHECK-RV64-NEXT:  .LBB61_872: # %cond.load1417
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 356
-; CHECK-RV64-NEXT:    li a3, 355
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_873
-; CHECK-RV64-NEXT:    j .LBB61_367
-; CHECK-RV64-NEXT:  .LBB61_873: # %cond.load1421
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 357
-; CHECK-RV64-NEXT:    li a3, 356
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_874
-; CHECK-RV64-NEXT:    j .LBB61_368
-; CHECK-RV64-NEXT:  .LBB61_874: # %cond.load1425
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 358
-; CHECK-RV64-NEXT:    li a3, 357
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_875
-; CHECK-RV64-NEXT:    j .LBB61_369
-; CHECK-RV64-NEXT:  .LBB61_875: # %cond.load1429
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 359
-; CHECK-RV64-NEXT:    li a3, 358
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_876
-; CHECK-RV64-NEXT:    j .LBB61_370
-; CHECK-RV64-NEXT:  .LBB61_876: # %cond.load1433
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 360
-; CHECK-RV64-NEXT:    li a3, 359
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_877
-; CHECK-RV64-NEXT:    j .LBB61_371
-; CHECK-RV64-NEXT:  .LBB61_877: # %cond.load1437
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 361
-; CHECK-RV64-NEXT:    li a3, 360
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_878
-; CHECK-RV64-NEXT:    j .LBB61_372
-; CHECK-RV64-NEXT:  .LBB61_878: # %cond.load1441
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 362
-; CHECK-RV64-NEXT:    li a3, 361
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_879
-; CHECK-RV64-NEXT:    j .LBB61_373
-; CHECK-RV64-NEXT:  .LBB61_879: # %cond.load1445
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 363
-; CHECK-RV64-NEXT:    li a3, 362
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_880
-; CHECK-RV64-NEXT:    j .LBB61_374
-; CHECK-RV64-NEXT:  .LBB61_880: # %cond.load1449
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 364
-; CHECK-RV64-NEXT:    li a3, 363
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_881
-; CHECK-RV64-NEXT:    j .LBB61_375
-; CHECK-RV64-NEXT:  .LBB61_881: # %cond.load1453
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 365
-; CHECK-RV64-NEXT:    li a3, 364
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_882
-; CHECK-RV64-NEXT:    j .LBB61_376
-; CHECK-RV64-NEXT:  .LBB61_882: # %cond.load1457
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 366
-; CHECK-RV64-NEXT:    li a3, 365
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_883
-; CHECK-RV64-NEXT:    j .LBB61_377
-; CHECK-RV64-NEXT:  .LBB61_883: # %cond.load1461
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 367
-; CHECK-RV64-NEXT:    li a3, 366
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_884
-; CHECK-RV64-NEXT:    j .LBB61_378
-; CHECK-RV64-NEXT:  .LBB61_884: # %cond.load1465
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 368
-; CHECK-RV64-NEXT:    li a3, 367
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_885
-; CHECK-RV64-NEXT:    j .LBB61_379
-; CHECK-RV64-NEXT:  .LBB61_885: # %cond.load1469
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 369
-; CHECK-RV64-NEXT:    li a3, 368
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_886
-; CHECK-RV64-NEXT:    j .LBB61_380
-; CHECK-RV64-NEXT:  .LBB61_886: # %cond.load1473
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 370
-; CHECK-RV64-NEXT:    li a3, 369
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_887
-; CHECK-RV64-NEXT:    j .LBB61_381
-; CHECK-RV64-NEXT:  .LBB61_887: # %cond.load1477
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 371
-; CHECK-RV64-NEXT:    li a3, 370
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_888
-; CHECK-RV64-NEXT:    j .LBB61_382
-; CHECK-RV64-NEXT:  .LBB61_888: # %cond.load1481
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 372
-; CHECK-RV64-NEXT:    li a3, 371
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_889
-; CHECK-RV64-NEXT:    j .LBB61_383
-; CHECK-RV64-NEXT:  .LBB61_889: # %cond.load1485
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 373
-; CHECK-RV64-NEXT:    li a3, 372
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_890
-; CHECK-RV64-NEXT:    j .LBB61_384
-; CHECK-RV64-NEXT:  .LBB61_890: # %cond.load1489
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 374
-; CHECK-RV64-NEXT:    li a3, 373
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_891
-; CHECK-RV64-NEXT:    j .LBB61_385
-; CHECK-RV64-NEXT:  .LBB61_891: # %cond.load1493
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 375
-; CHECK-RV64-NEXT:    li a3, 374
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_892
-; CHECK-RV64-NEXT:    j .LBB61_386
-; CHECK-RV64-NEXT:  .LBB61_892: # %cond.load1497
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 376
-; CHECK-RV64-NEXT:    li a3, 375
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_893
-; CHECK-RV64-NEXT:    j .LBB61_387
-; CHECK-RV64-NEXT:  .LBB61_893: # %cond.load1501
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 377
-; CHECK-RV64-NEXT:    li a3, 376
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_894
-; CHECK-RV64-NEXT:    j .LBB61_388
-; CHECK-RV64-NEXT:  .LBB61_894: # %cond.load1505
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 378
-; CHECK-RV64-NEXT:    li a3, 377
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_895
-; CHECK-RV64-NEXT:    j .LBB61_389
-; CHECK-RV64-NEXT:  .LBB61_895: # %cond.load1509
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 379
-; CHECK-RV64-NEXT:    li a3, 378
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_896
-; CHECK-RV64-NEXT:    j .LBB61_390
-; CHECK-RV64-NEXT:  .LBB61_896: # %cond.load1513
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 380
-; CHECK-RV64-NEXT:    li a3, 379
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_897
-; CHECK-RV64-NEXT:    j .LBB61_391
-; CHECK-RV64-NEXT:  .LBB61_897: # %cond.load1517
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 381
-; CHECK-RV64-NEXT:    li a3, 380
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bgez a2, .LBB61_1030
-; CHECK-RV64-NEXT:    j .LBB61_392
-; CHECK-RV64-NEXT:  .LBB61_1030: # %cond.load1517
-; CHECK-RV64-NEXT:    j .LBB61_393
-; CHECK-RV64-NEXT:  .LBB61_898: # %cond.load1529
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 384
-; CHECK-RV64-NEXT:    li a3, 383
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 1
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_899
-; CHECK-RV64-NEXT:    j .LBB61_397
-; CHECK-RV64-NEXT:  .LBB61_899: # %cond.load1533
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 385
-; CHECK-RV64-NEXT:    li a3, 384
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 2
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_900
-; CHECK-RV64-NEXT:    j .LBB61_398
-; CHECK-RV64-NEXT:  .LBB61_900: # %cond.load1537
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 386
-; CHECK-RV64-NEXT:    li a3, 385
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 4
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_901
-; CHECK-RV64-NEXT:    j .LBB61_399
-; CHECK-RV64-NEXT:  .LBB61_901: # %cond.load1541
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 387
-; CHECK-RV64-NEXT:    li a3, 386
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 8
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_902
-; CHECK-RV64-NEXT:    j .LBB61_400
-; CHECK-RV64-NEXT:  .LBB61_902: # %cond.load1545
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 388
-; CHECK-RV64-NEXT:    li a3, 387
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 16
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_903
-; CHECK-RV64-NEXT:    j .LBB61_401
-; CHECK-RV64-NEXT:  .LBB61_903: # %cond.load1549
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 389
-; CHECK-RV64-NEXT:    li a3, 388
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 32
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_904
-; CHECK-RV64-NEXT:    j .LBB61_402
-; CHECK-RV64-NEXT:  .LBB61_904: # %cond.load1553
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 390
-; CHECK-RV64-NEXT:    li a3, 389
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 64
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_905
-; CHECK-RV64-NEXT:    j .LBB61_403
-; CHECK-RV64-NEXT:  .LBB61_905: # %cond.load1557
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 391
-; CHECK-RV64-NEXT:    li a3, 390
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 128
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_906
-; CHECK-RV64-NEXT:    j .LBB61_404
-; CHECK-RV64-NEXT:  .LBB61_906: # %cond.load1561
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 392
-; CHECK-RV64-NEXT:    li a3, 391
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 256
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_907
-; CHECK-RV64-NEXT:    j .LBB61_405
-; CHECK-RV64-NEXT:  .LBB61_907: # %cond.load1565
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 393
-; CHECK-RV64-NEXT:    li a3, 392
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 512
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_908
-; CHECK-RV64-NEXT:    j .LBB61_406
-; CHECK-RV64-NEXT:  .LBB61_908: # %cond.load1569
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 394
-; CHECK-RV64-NEXT:    li a3, 393
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-RV64-NEXT:    bnez a1, .LBB61_909
-; CHECK-RV64-NEXT:    j .LBB61_407
-; CHECK-RV64-NEXT:  .LBB61_909: # %cond.load1573
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 395
-; CHECK-RV64-NEXT:    li a3, 394
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 52
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_910
-; CHECK-RV64-NEXT:    j .LBB61_408
-; CHECK-RV64-NEXT:  .LBB61_910: # %cond.load1577
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 396
-; CHECK-RV64-NEXT:    li a3, 395
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 51
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_911
-; CHECK-RV64-NEXT:    j .LBB61_409
-; CHECK-RV64-NEXT:  .LBB61_911: # %cond.load1581
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 397
-; CHECK-RV64-NEXT:    li a3, 396
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 50
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_912
-; CHECK-RV64-NEXT:    j .LBB61_410
-; CHECK-RV64-NEXT:  .LBB61_912: # %cond.load1585
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 398
-; CHECK-RV64-NEXT:    li a3, 397
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 49
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_913
-; CHECK-RV64-NEXT:    j .LBB61_411
-; CHECK-RV64-NEXT:  .LBB61_913: # %cond.load1589
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 399
-; CHECK-RV64-NEXT:    li a3, 398
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 48
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_914
-; CHECK-RV64-NEXT:    j .LBB61_412
-; CHECK-RV64-NEXT:  .LBB61_914: # %cond.load1593
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 400
-; CHECK-RV64-NEXT:    li a3, 399
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 47
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_915
-; CHECK-RV64-NEXT:    j .LBB61_413
-; CHECK-RV64-NEXT:  .LBB61_915: # %cond.load1597
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 401
-; CHECK-RV64-NEXT:    li a3, 400
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 46
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_916
-; CHECK-RV64-NEXT:    j .LBB61_414
-; CHECK-RV64-NEXT:  .LBB61_916: # %cond.load1601
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 402
-; CHECK-RV64-NEXT:    li a3, 401
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 45
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_917
-; CHECK-RV64-NEXT:    j .LBB61_415
-; CHECK-RV64-NEXT:  .LBB61_917: # %cond.load1605
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 403
-; CHECK-RV64-NEXT:    li a3, 402
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 44
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_918
-; CHECK-RV64-NEXT:    j .LBB61_416
-; CHECK-RV64-NEXT:  .LBB61_918: # %cond.load1609
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 404
-; CHECK-RV64-NEXT:    li a3, 403
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 43
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_919
-; CHECK-RV64-NEXT:    j .LBB61_417
-; CHECK-RV64-NEXT:  .LBB61_919: # %cond.load1613
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 405
-; CHECK-RV64-NEXT:    li a3, 404
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 42
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_920
-; CHECK-RV64-NEXT:    j .LBB61_418
-; CHECK-RV64-NEXT:  .LBB61_920: # %cond.load1617
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 406
-; CHECK-RV64-NEXT:    li a3, 405
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 41
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_921
-; CHECK-RV64-NEXT:    j .LBB61_419
-; CHECK-RV64-NEXT:  .LBB61_921: # %cond.load1621
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 407
-; CHECK-RV64-NEXT:    li a3, 406
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 40
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_922
-; CHECK-RV64-NEXT:    j .LBB61_420
-; CHECK-RV64-NEXT:  .LBB61_922: # %cond.load1625
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 408
-; CHECK-RV64-NEXT:    li a3, 407
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 39
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_923
-; CHECK-RV64-NEXT:    j .LBB61_421
-; CHECK-RV64-NEXT:  .LBB61_923: # %cond.load1629
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 409
-; CHECK-RV64-NEXT:    li a3, 408
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 38
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_924
-; CHECK-RV64-NEXT:    j .LBB61_422
-; CHECK-RV64-NEXT:  .LBB61_924: # %cond.load1633
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 410
-; CHECK-RV64-NEXT:    li a3, 409
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 37
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_925
-; CHECK-RV64-NEXT:    j .LBB61_423
-; CHECK-RV64-NEXT:  .LBB61_925: # %cond.load1637
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 411
-; CHECK-RV64-NEXT:    li a3, 410
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 36
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_926
-; CHECK-RV64-NEXT:    j .LBB61_424
-; CHECK-RV64-NEXT:  .LBB61_926: # %cond.load1641
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 412
-; CHECK-RV64-NEXT:    li a3, 411
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 35
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_927
-; CHECK-RV64-NEXT:    j .LBB61_425
-; CHECK-RV64-NEXT:  .LBB61_927: # %cond.load1645
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 413
-; CHECK-RV64-NEXT:    li a3, 412
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 34
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_928
-; CHECK-RV64-NEXT:    j .LBB61_426
-; CHECK-RV64-NEXT:  .LBB61_928: # %cond.load1649
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 414
-; CHECK-RV64-NEXT:    li a3, 413
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 33
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_929
-; CHECK-RV64-NEXT:    j .LBB61_427
-; CHECK-RV64-NEXT:  .LBB61_929: # %cond.load1653
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 415
-; CHECK-RV64-NEXT:    li a3, 414
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 32
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_930
-; CHECK-RV64-NEXT:    j .LBB61_428
-; CHECK-RV64-NEXT:  .LBB61_930: # %cond.load1657
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 416
-; CHECK-RV64-NEXT:    li a3, 415
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 31
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_931
-; CHECK-RV64-NEXT:    j .LBB61_429
-; CHECK-RV64-NEXT:  .LBB61_931: # %cond.load1661
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 417
-; CHECK-RV64-NEXT:    li a3, 416
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 30
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_932
-; CHECK-RV64-NEXT:    j .LBB61_430
-; CHECK-RV64-NEXT:  .LBB61_932: # %cond.load1665
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 418
-; CHECK-RV64-NEXT:    li a3, 417
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 29
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_933
-; CHECK-RV64-NEXT:    j .LBB61_431
-; CHECK-RV64-NEXT:  .LBB61_933: # %cond.load1669
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 419
-; CHECK-RV64-NEXT:    li a3, 418
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 28
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_934
-; CHECK-RV64-NEXT:    j .LBB61_432
-; CHECK-RV64-NEXT:  .LBB61_934: # %cond.load1673
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 420
-; CHECK-RV64-NEXT:    li a3, 419
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 27
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_935
-; CHECK-RV64-NEXT:    j .LBB61_433
-; CHECK-RV64-NEXT:  .LBB61_935: # %cond.load1677
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 421
-; CHECK-RV64-NEXT:    li a3, 420
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 26
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_936
-; CHECK-RV64-NEXT:    j .LBB61_434
-; CHECK-RV64-NEXT:  .LBB61_936: # %cond.load1681
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 422
-; CHECK-RV64-NEXT:    li a3, 421
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 25
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_937
-; CHECK-RV64-NEXT:    j .LBB61_435
-; CHECK-RV64-NEXT:  .LBB61_937: # %cond.load1685
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 423
-; CHECK-RV64-NEXT:    li a3, 422
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 24
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_938
-; CHECK-RV64-NEXT:    j .LBB61_436
-; CHECK-RV64-NEXT:  .LBB61_938: # %cond.load1689
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 424
-; CHECK-RV64-NEXT:    li a3, 423
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 23
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_939
-; CHECK-RV64-NEXT:    j .LBB61_437
-; CHECK-RV64-NEXT:  .LBB61_939: # %cond.load1693
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 425
-; CHECK-RV64-NEXT:    li a3, 424
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 22
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_940
-; CHECK-RV64-NEXT:    j .LBB61_438
-; CHECK-RV64-NEXT:  .LBB61_940: # %cond.load1697
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 426
-; CHECK-RV64-NEXT:    li a3, 425
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 21
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_941
-; CHECK-RV64-NEXT:    j .LBB61_439
-; CHECK-RV64-NEXT:  .LBB61_941: # %cond.load1701
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 427
-; CHECK-RV64-NEXT:    li a3, 426
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 20
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_942
-; CHECK-RV64-NEXT:    j .LBB61_440
-; CHECK-RV64-NEXT:  .LBB61_942: # %cond.load1705
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 428
-; CHECK-RV64-NEXT:    li a3, 427
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 19
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_943
-; CHECK-RV64-NEXT:    j .LBB61_441
-; CHECK-RV64-NEXT:  .LBB61_943: # %cond.load1709
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 429
-; CHECK-RV64-NEXT:    li a3, 428
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 18
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_944
-; CHECK-RV64-NEXT:    j .LBB61_442
-; CHECK-RV64-NEXT:  .LBB61_944: # %cond.load1713
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 430
-; CHECK-RV64-NEXT:    li a3, 429
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 17
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_945
-; CHECK-RV64-NEXT:    j .LBB61_443
-; CHECK-RV64-NEXT:  .LBB61_945: # %cond.load1717
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 431
-; CHECK-RV64-NEXT:    li a3, 430
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 16
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_946
-; CHECK-RV64-NEXT:    j .LBB61_444
-; CHECK-RV64-NEXT:  .LBB61_946: # %cond.load1721
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 432
-; CHECK-RV64-NEXT:    li a3, 431
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 15
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_947
-; CHECK-RV64-NEXT:    j .LBB61_445
-; CHECK-RV64-NEXT:  .LBB61_947: # %cond.load1725
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 433
-; CHECK-RV64-NEXT:    li a3, 432
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 14
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_948
-; CHECK-RV64-NEXT:    j .LBB61_446
-; CHECK-RV64-NEXT:  .LBB61_948: # %cond.load1729
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 434
-; CHECK-RV64-NEXT:    li a3, 433
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 13
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_949
-; CHECK-RV64-NEXT:    j .LBB61_447
-; CHECK-RV64-NEXT:  .LBB61_949: # %cond.load1733
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 435
-; CHECK-RV64-NEXT:    li a3, 434
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 12
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_950
-; CHECK-RV64-NEXT:    j .LBB61_448
-; CHECK-RV64-NEXT:  .LBB61_950: # %cond.load1737
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 436
-; CHECK-RV64-NEXT:    li a3, 435
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 11
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_951
-; CHECK-RV64-NEXT:    j .LBB61_449
-; CHECK-RV64-NEXT:  .LBB61_951: # %cond.load1741
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 437
-; CHECK-RV64-NEXT:    li a3, 436
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 10
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_952
-; CHECK-RV64-NEXT:    j .LBB61_450
-; CHECK-RV64-NEXT:  .LBB61_952: # %cond.load1745
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 438
-; CHECK-RV64-NEXT:    li a3, 437
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 9
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_953
-; CHECK-RV64-NEXT:    j .LBB61_451
-; CHECK-RV64-NEXT:  .LBB61_953: # %cond.load1749
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 439
-; CHECK-RV64-NEXT:    li a3, 438
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 8
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_954
-; CHECK-RV64-NEXT:    j .LBB61_452
-; CHECK-RV64-NEXT:  .LBB61_954: # %cond.load1753
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 440
-; CHECK-RV64-NEXT:    li a3, 439
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 7
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_955
-; CHECK-RV64-NEXT:    j .LBB61_453
-; CHECK-RV64-NEXT:  .LBB61_955: # %cond.load1757
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 441
-; CHECK-RV64-NEXT:    li a3, 440
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 6
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_956
-; CHECK-RV64-NEXT:    j .LBB61_454
-; CHECK-RV64-NEXT:  .LBB61_956: # %cond.load1761
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 442
-; CHECK-RV64-NEXT:    li a3, 441
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 5
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_957
-; CHECK-RV64-NEXT:    j .LBB61_455
-; CHECK-RV64-NEXT:  .LBB61_957: # %cond.load1765
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 443
-; CHECK-RV64-NEXT:    li a3, 442
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 4
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_958
-; CHECK-RV64-NEXT:    j .LBB61_456
-; CHECK-RV64-NEXT:  .LBB61_958: # %cond.load1769
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 444
-; CHECK-RV64-NEXT:    li a3, 443
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 3
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_959
-; CHECK-RV64-NEXT:    j .LBB61_457
-; CHECK-RV64-NEXT:  .LBB61_959: # %cond.load1773
-; CHECK-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-RV64-NEXT:    li a1, 445
-; CHECK-RV64-NEXT:    li a3, 444
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a1, a2, 2
-; CHECK-RV64-NEXT:    bgez a1, .LBB61_1031
-; CHECK-RV64-NEXT:    j .LBB61_458
-; CHECK-RV64-NEXT:  .LBB61_1031: # %cond.load1773
-; CHECK-RV64-NEXT:    j .LBB61_459
-; CHECK-RV64-NEXT:  .LBB61_960: # %cond.load1785
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 448
-; CHECK-RV64-NEXT:    li a3, 447
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 1
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_961
-; CHECK-RV64-NEXT:    j .LBB61_463
-; CHECK-RV64-NEXT:  .LBB61_961: # %cond.load1789
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 449
-; CHECK-RV64-NEXT:    li a3, 448
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 2
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_962
-; CHECK-RV64-NEXT:    j .LBB61_464
-; CHECK-RV64-NEXT:  .LBB61_962: # %cond.load1793
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 450
-; CHECK-RV64-NEXT:    li a3, 449
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 4
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_963
-; CHECK-RV64-NEXT:    j .LBB61_465
-; CHECK-RV64-NEXT:  .LBB61_963: # %cond.load1797
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 451
-; CHECK-RV64-NEXT:    li a3, 450
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 8
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_964
-; CHECK-RV64-NEXT:    j .LBB61_466
-; CHECK-RV64-NEXT:  .LBB61_964: # %cond.load1801
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 452
-; CHECK-RV64-NEXT:    li a3, 451
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 16
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_965
-; CHECK-RV64-NEXT:    j .LBB61_467
-; CHECK-RV64-NEXT:  .LBB61_965: # %cond.load1805
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 453
-; CHECK-RV64-NEXT:    li a3, 452
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 32
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_966
-; CHECK-RV64-NEXT:    j .LBB61_468
-; CHECK-RV64-NEXT:  .LBB61_966: # %cond.load1809
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 454
-; CHECK-RV64-NEXT:    li a3, 453
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 64
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_967
-; CHECK-RV64-NEXT:    j .LBB61_469
-; CHECK-RV64-NEXT:  .LBB61_967: # %cond.load1813
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 455
-; CHECK-RV64-NEXT:    li a3, 454
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 128
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_968
-; CHECK-RV64-NEXT:    j .LBB61_470
-; CHECK-RV64-NEXT:  .LBB61_968: # %cond.load1817
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 456
-; CHECK-RV64-NEXT:    li a3, 455
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 256
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_969
-; CHECK-RV64-NEXT:    j .LBB61_471
-; CHECK-RV64-NEXT:  .LBB61_969: # %cond.load1821
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 457
-; CHECK-RV64-NEXT:    li a3, 456
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 512
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_970
-; CHECK-RV64-NEXT:    j .LBB61_472
-; CHECK-RV64-NEXT:  .LBB61_970: # %cond.load1825
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 458
-; CHECK-RV64-NEXT:    li a3, 457
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-RV64-NEXT:    bnez a2, .LBB61_971
-; CHECK-RV64-NEXT:    j .LBB61_473
-; CHECK-RV64-NEXT:  .LBB61_971: # %cond.load1829
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 459
-; CHECK-RV64-NEXT:    li a3, 458
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 52
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_972
-; CHECK-RV64-NEXT:    j .LBB61_474
-; CHECK-RV64-NEXT:  .LBB61_972: # %cond.load1833
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 460
-; CHECK-RV64-NEXT:    li a3, 459
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 51
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_973
-; CHECK-RV64-NEXT:    j .LBB61_475
-; CHECK-RV64-NEXT:  .LBB61_973: # %cond.load1837
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 461
-; CHECK-RV64-NEXT:    li a3, 460
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 50
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_974
-; CHECK-RV64-NEXT:    j .LBB61_476
-; CHECK-RV64-NEXT:  .LBB61_974: # %cond.load1841
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 462
-; CHECK-RV64-NEXT:    li a3, 461
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 49
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_975
-; CHECK-RV64-NEXT:    j .LBB61_477
-; CHECK-RV64-NEXT:  .LBB61_975: # %cond.load1845
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 463
-; CHECK-RV64-NEXT:    li a3, 462
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 48
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_976
-; CHECK-RV64-NEXT:    j .LBB61_478
-; CHECK-RV64-NEXT:  .LBB61_976: # %cond.load1849
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 464
-; CHECK-RV64-NEXT:    li a3, 463
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 47
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_977
-; CHECK-RV64-NEXT:    j .LBB61_479
-; CHECK-RV64-NEXT:  .LBB61_977: # %cond.load1853
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 465
-; CHECK-RV64-NEXT:    li a3, 464
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 46
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_978
-; CHECK-RV64-NEXT:    j .LBB61_480
-; CHECK-RV64-NEXT:  .LBB61_978: # %cond.load1857
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 466
-; CHECK-RV64-NEXT:    li a3, 465
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 45
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_979
-; CHECK-RV64-NEXT:    j .LBB61_481
-; CHECK-RV64-NEXT:  .LBB61_979: # %cond.load1861
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 467
-; CHECK-RV64-NEXT:    li a3, 466
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 44
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_980
-; CHECK-RV64-NEXT:    j .LBB61_482
-; CHECK-RV64-NEXT:  .LBB61_980: # %cond.load1865
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 468
-; CHECK-RV64-NEXT:    li a3, 467
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 43
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_981
-; CHECK-RV64-NEXT:    j .LBB61_483
-; CHECK-RV64-NEXT:  .LBB61_981: # %cond.load1869
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 469
-; CHECK-RV64-NEXT:    li a3, 468
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 42
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_982
-; CHECK-RV64-NEXT:    j .LBB61_484
-; CHECK-RV64-NEXT:  .LBB61_982: # %cond.load1873
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 470
-; CHECK-RV64-NEXT:    li a3, 469
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 41
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_983
-; CHECK-RV64-NEXT:    j .LBB61_485
-; CHECK-RV64-NEXT:  .LBB61_983: # %cond.load1877
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 471
-; CHECK-RV64-NEXT:    li a3, 470
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 40
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_984
-; CHECK-RV64-NEXT:    j .LBB61_486
-; CHECK-RV64-NEXT:  .LBB61_984: # %cond.load1881
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 472
-; CHECK-RV64-NEXT:    li a3, 471
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 39
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_985
-; CHECK-RV64-NEXT:    j .LBB61_487
-; CHECK-RV64-NEXT:  .LBB61_985: # %cond.load1885
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 473
-; CHECK-RV64-NEXT:    li a3, 472
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 38
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_986
-; CHECK-RV64-NEXT:    j .LBB61_488
-; CHECK-RV64-NEXT:  .LBB61_986: # %cond.load1889
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 474
-; CHECK-RV64-NEXT:    li a3, 473
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 37
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_987
-; CHECK-RV64-NEXT:    j .LBB61_489
-; CHECK-RV64-NEXT:  .LBB61_987: # %cond.load1893
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 475
-; CHECK-RV64-NEXT:    li a3, 474
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 36
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_988
-; CHECK-RV64-NEXT:    j .LBB61_490
-; CHECK-RV64-NEXT:  .LBB61_988: # %cond.load1897
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 476
-; CHECK-RV64-NEXT:    li a3, 475
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 35
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_989
-; CHECK-RV64-NEXT:    j .LBB61_491
-; CHECK-RV64-NEXT:  .LBB61_989: # %cond.load1901
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 477
-; CHECK-RV64-NEXT:    li a3, 476
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 34
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_990
-; CHECK-RV64-NEXT:    j .LBB61_492
-; CHECK-RV64-NEXT:  .LBB61_990: # %cond.load1905
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 478
-; CHECK-RV64-NEXT:    li a3, 477
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 33
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_991
-; CHECK-RV64-NEXT:    j .LBB61_493
-; CHECK-RV64-NEXT:  .LBB61_991: # %cond.load1909
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 479
-; CHECK-RV64-NEXT:    li a3, 478
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 32
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_992
-; CHECK-RV64-NEXT:    j .LBB61_494
-; CHECK-RV64-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 480
-; CHECK-RV64-NEXT:    li a3, 479
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 31
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_993
-; CHECK-RV64-NEXT:    j .LBB61_495
-; CHECK-RV64-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 481
-; CHECK-RV64-NEXT:    li a3, 480
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 30
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_994
-; CHECK-RV64-NEXT:    j .LBB61_496
-; CHECK-RV64-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 482
-; CHECK-RV64-NEXT:    li a3, 481
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 29
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_995
-; CHECK-RV64-NEXT:    j .LBB61_497
-; CHECK-RV64-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 483
-; CHECK-RV64-NEXT:    li a3, 482
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 28
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_996
-; CHECK-RV64-NEXT:    j .LBB61_498
-; CHECK-RV64-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 484
-; CHECK-RV64-NEXT:    li a3, 483
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 27
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_997
-; CHECK-RV64-NEXT:    j .LBB61_499
-; CHECK-RV64-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 485
-; CHECK-RV64-NEXT:    li a3, 484
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 26
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_998
-; CHECK-RV64-NEXT:    j .LBB61_500
-; CHECK-RV64-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 486
-; CHECK-RV64-NEXT:    li a3, 485
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 25
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_999
-; CHECK-RV64-NEXT:    j .LBB61_501
-; CHECK-RV64-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 487
-; CHECK-RV64-NEXT:    li a3, 486
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 24
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1000
-; CHECK-RV64-NEXT:    j .LBB61_502
-; CHECK-RV64-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 488
-; CHECK-RV64-NEXT:    li a3, 487
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 23
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1001
-; CHECK-RV64-NEXT:    j .LBB61_503
-; CHECK-RV64-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 489
-; CHECK-RV64-NEXT:    li a3, 488
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 22
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1002
-; CHECK-RV64-NEXT:    j .LBB61_504
-; CHECK-RV64-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 490
-; CHECK-RV64-NEXT:    li a3, 489
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 21
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1003
-; CHECK-RV64-NEXT:    j .LBB61_505
-; CHECK-RV64-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 491
-; CHECK-RV64-NEXT:    li a3, 490
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 20
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1004
-; CHECK-RV64-NEXT:    j .LBB61_506
-; CHECK-RV64-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 492
-; CHECK-RV64-NEXT:    li a3, 491
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 19
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1005
-; CHECK-RV64-NEXT:    j .LBB61_507
-; CHECK-RV64-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 493
-; CHECK-RV64-NEXT:    li a3, 492
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 18
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1006
-; CHECK-RV64-NEXT:    j .LBB61_508
-; CHECK-RV64-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 494
-; CHECK-RV64-NEXT:    li a3, 493
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 17
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1007
-; CHECK-RV64-NEXT:    j .LBB61_509
-; CHECK-RV64-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 495
-; CHECK-RV64-NEXT:    li a3, 494
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 16
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1008
-; CHECK-RV64-NEXT:    j .LBB61_510
-; CHECK-RV64-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 496
-; CHECK-RV64-NEXT:    li a3, 495
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 15
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1009
-; CHECK-RV64-NEXT:    j .LBB61_511
-; CHECK-RV64-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 497
-; CHECK-RV64-NEXT:    li a3, 496
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 14
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1010
-; CHECK-RV64-NEXT:    j .LBB61_512
-; CHECK-RV64-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 498
-; CHECK-RV64-NEXT:    li a3, 497
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 13
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1011
-; CHECK-RV64-NEXT:    j .LBB61_513
-; CHECK-RV64-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 499
-; CHECK-RV64-NEXT:    li a3, 498
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 12
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1012
-; CHECK-RV64-NEXT:    j .LBB61_514
-; CHECK-RV64-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 500
-; CHECK-RV64-NEXT:    li a3, 499
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 11
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1013
-; CHECK-RV64-NEXT:    j .LBB61_515
-; CHECK-RV64-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 501
-; CHECK-RV64-NEXT:    li a3, 500
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 10
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1014
-; CHECK-RV64-NEXT:    j .LBB61_516
-; CHECK-RV64-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 502
-; CHECK-RV64-NEXT:    li a3, 501
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 9
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1015
-; CHECK-RV64-NEXT:    j .LBB61_517
-; CHECK-RV64-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 503
-; CHECK-RV64-NEXT:    li a3, 502
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 8
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1016
-; CHECK-RV64-NEXT:    j .LBB61_518
-; CHECK-RV64-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 504
-; CHECK-RV64-NEXT:    li a3, 503
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 7
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1017
-; CHECK-RV64-NEXT:    j .LBB61_519
-; CHECK-RV64-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 505
-; CHECK-RV64-NEXT:    li a3, 504
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 6
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1018
-; CHECK-RV64-NEXT:    j .LBB61_520
-; CHECK-RV64-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 506
-; CHECK-RV64-NEXT:    li a3, 505
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 5
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1019
-; CHECK-RV64-NEXT:    j .LBB61_521
-; CHECK-RV64-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 507
-; CHECK-RV64-NEXT:    li a3, 506
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 4
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1020
-; CHECK-RV64-NEXT:    j .LBB61_522
-; CHECK-RV64-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 508
-; CHECK-RV64-NEXT:    li a3, 507
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 3
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1021
-; CHECK-RV64-NEXT:    j .LBB61_523
-; CHECK-RV64-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 509
-; CHECK-RV64-NEXT:    li a3, 508
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 2
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1022
-; CHECK-RV64-NEXT:    j .LBB61_524
-; CHECK-RV64-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 510
-; CHECK-RV64-NEXT:    li a3, 509
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    slli a2, a1, 1
-; CHECK-RV64-NEXT:    bltz a2, .LBB61_1023
-; CHECK-RV64-NEXT:    j .LBB61_525
-; CHECK-RV64-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-RV64-NEXT:    li a3, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-RV64-NEXT:    li a2, 511
-; CHECK-RV64-NEXT:    li a3, 510
-; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-RV64-NEXT:    addi a0, a0, 1
-; CHECK-RV64-NEXT:    bltz a1, .LBB61_1024
-; CHECK-RV64-NEXT:    j .LBB61_526
-; CHECK-RV64-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-RV64-NEXT:    lbu a0, 0(a0)
-; CHECK-RV64-NEXT:    li a1, 512
-; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-RV64-NEXT:    vmv.s.x v16, a0
-; CHECK-RV64-NEXT:    li a0, 511
-; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v0
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_1
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_527
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1: # %else
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_2
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_528
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_2: # %else2
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_3
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_529
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_3: # %else6
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_4
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_530
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_4: # %else10
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_5
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_531
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_5: # %else14
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_6
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_532
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_6: # %else18
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_7
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_533
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_7: # %else22
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_8
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_534
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_8: # %else26
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_9
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_535
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_9: # %else30
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_10
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_536
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_10: # %else34
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_11
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_537
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_11: # %else38
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_12
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_538
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_12: # %else42
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_13
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_539
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_13: # %else46
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_14
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_540
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_14: # %else50
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_15
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_541
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_15: # %else54
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_16
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_542
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_16: # %else58
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_17
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_543
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_17: # %else62
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_18
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_544
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_18: # %else66
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_19
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_545
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_19: # %else70
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_20
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_546
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_20: # %else74
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_21
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_547
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_21: # %else78
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_22
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_548
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_22: # %else82
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_23
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_549
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_23: # %else86
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_24
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_550
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_24: # %else90
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_25
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_551
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_25: # %else94
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_26
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_552
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_26: # %else98
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_27
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_553
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_27: # %else102
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_28
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_554
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_28: # %else106
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_29
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_555
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_29: # %else110
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_30
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_556
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_30: # %else114
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_31
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_557
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_31: # %else118
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_32
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_558
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_32: # %else122
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_33
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_559
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_33: # %else126
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_34
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_560
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_34: # %else130
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_35
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_561
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_35: # %else134
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_36
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_562
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_36: # %else138
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_37
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_563
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_37: # %else142
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_38
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_564
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_38: # %else146
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_39
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_565
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_39: # %else150
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_40
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_566
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_40: # %else154
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_41
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_567
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_41: # %else158
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_42
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_568
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_42: # %else162
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_43
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_569
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_43: # %else166
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_44
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_570
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_44: # %else170
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_45
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_571
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_45: # %else174
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_46
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_572
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_46: # %else178
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_47
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_573
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_47: # %else182
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_48
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_574
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_48: # %else186
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_49
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_575
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_49: # %else190
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_50
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_576
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_50: # %else194
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_51
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_577
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_51: # %else198
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_52
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_578
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_52: # %else202
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_53
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_579
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_53: # %else206
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_54
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_580
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_54: # %else210
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_55
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_581
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_55: # %else214
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_56
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_582
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_56: # %else218
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_57
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_583
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_57: # %else222
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_58
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_584
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_58: # %else226
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_59
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_585
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_59: # %else230
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_60
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_586
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_60: # %else234
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_61
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_587
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_61: # %else238
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_63
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_62: # %cond.load241
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 62
+; CHECK-INDEXED-RV64-NEXT:    li a3, 61
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_63: # %else242
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_65
+; CHECK-INDEXED-RV64-NEXT:  # %bb.64: # %cond.load245
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v17, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 63
+; CHECK-INDEXED-RV64-NEXT:    li a3, 62
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v17, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_65: # %else246
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_66
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_588
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_66: # %else250
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_67
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_589
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_67: # %else254
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_68
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_590
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_68: # %else258
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_69
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_591
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_69: # %else262
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_70
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_592
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_70: # %else266
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_71
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_593
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_71: # %else270
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_72
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_594
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_72: # %else274
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_73
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_595
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_73: # %else278
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_74
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_596
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_74: # %else282
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_75
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_597
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_75: # %else286
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_76
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_598
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_76: # %else290
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_77
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_599
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_77: # %else294
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_78
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_600
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_78: # %else298
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_79
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_601
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_79: # %else302
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_80
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_602
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_80: # %else306
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_81
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_603
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_81: # %else310
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_82
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_604
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_82: # %else314
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_83
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_605
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_83: # %else318
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_84
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_606
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_84: # %else322
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_85
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_607
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_85: # %else326
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_86
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_608
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_86: # %else330
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_87
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_609
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_87: # %else334
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_88
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_610
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_88: # %else338
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_89
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_611
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_89: # %else342
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_90
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_612
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_90: # %else346
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_91
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_613
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_91: # %else350
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_92
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_614
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_92: # %else354
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_93
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_615
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_93: # %else358
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_94
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_616
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_94: # %else362
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_95
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_617
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_95: # %else366
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_96
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_618
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_96: # %else370
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_97
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_619
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_97: # %else374
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_98
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_620
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_98: # %else378
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_99
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_621
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_99: # %else382
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_100
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_622
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_100: # %else386
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_101
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_623
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_101: # %else390
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_102
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_624
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_102: # %else394
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_103
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_625
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_103: # %else398
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_104
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_626
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_104: # %else402
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_105
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_627
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_105: # %else406
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_106
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_628
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_106: # %else410
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_107
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_629
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_107: # %else414
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_108
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_630
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_108: # %else418
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_109
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_631
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_109: # %else422
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_110
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_632
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_110: # %else426
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_111
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_633
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_111: # %else430
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_112
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_634
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_112: # %else434
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_113
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_635
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_113: # %else438
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_114
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_636
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_114: # %else442
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_115
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_637
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_115: # %else446
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_116
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_638
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_116: # %else450
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_117
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_639
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_117: # %else454
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_118
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_640
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_118: # %else458
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_119
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_641
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_119: # %else462
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_120
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_642
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_120: # %else466
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_121
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_643
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_121: # %else470
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_122
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_644
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_122: # %else474
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_123
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_645
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_123: # %else478
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_124
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_646
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_124: # %else482
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_125
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_647
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_125: # %else486
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_126
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_648
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_126: # %else490
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_127
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_649
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_127: # %else494
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_129
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_128: # %cond.load497
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 126
+; CHECK-INDEXED-RV64-NEXT:    li a3, 125
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_129: # %else498
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_131
+; CHECK-INDEXED-RV64-NEXT:  # %bb.130: # %cond.load501
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v18, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 127
+; CHECK-INDEXED-RV64-NEXT:    li a3, 126
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v18, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_131: # %else502
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_132
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_650
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_132: # %else506
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_133
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_651
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_133: # %else510
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_134
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_652
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_134: # %else514
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_135
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_653
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_135: # %else518
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_136
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_654
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_136: # %else522
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_137
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_655
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_137: # %else526
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_138
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_656
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_138: # %else530
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_139
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_657
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_139: # %else534
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_140
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_658
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_140: # %else538
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_141
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_659
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_141: # %else542
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_142
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_660
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_142: # %else546
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_143
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_661
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_143: # %else550
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_144
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_662
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_144: # %else554
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_145
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_663
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_145: # %else558
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_146
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_664
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_146: # %else562
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_147
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_665
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_147: # %else566
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_148
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_666
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_148: # %else570
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_149
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_667
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_149: # %else574
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_150
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_668
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_150: # %else578
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_151
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_669
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_151: # %else582
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_152
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_670
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_152: # %else586
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_153
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_671
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_153: # %else590
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_154
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_672
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_154: # %else594
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_155
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_673
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_155: # %else598
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_156
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_674
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_156: # %else602
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_157
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_675
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_157: # %else606
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_158
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_676
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_158: # %else610
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_159
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_677
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_159: # %else614
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_160
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_678
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_160: # %else618
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_161
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_679
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_161: # %else622
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_162
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_680
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_162: # %else626
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_163
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_681
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_163: # %else630
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_164
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_682
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_164: # %else634
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_165
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_683
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_165: # %else638
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_166
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_684
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_166: # %else642
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_167
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_685
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_167: # %else646
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_168
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_686
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_168: # %else650
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_169
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_687
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_169: # %else654
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_170
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_688
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_170: # %else658
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_171
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_689
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_171: # %else662
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_172
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_690
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_172: # %else666
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_173
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_691
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_173: # %else670
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_174
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_692
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_174: # %else674
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_175
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_693
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_175: # %else678
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_176
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_694
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_176: # %else682
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_177
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_695
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_177: # %else686
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_178
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_696
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_178: # %else690
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_179
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_697
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_179: # %else694
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_180
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_698
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_180: # %else698
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_181
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_699
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_181: # %else702
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_182
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_700
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_182: # %else706
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_183
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_701
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_183: # %else710
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_184
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_702
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_184: # %else714
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_185
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_703
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_185: # %else718
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_186
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_704
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_186: # %else722
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_187
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_705
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_187: # %else726
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_188
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_706
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_188: # %else730
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_189
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_707
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_189: # %else734
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_190
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_708
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_190: # %else738
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_191
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_709
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_191: # %else742
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_192
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_710
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_192: # %else746
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_193
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_711
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_193: # %else750
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_195
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_194: # %cond.load753
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 190
+; CHECK-INDEXED-RV64-NEXT:    li a3, 189
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_195: # %else754
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_197
+; CHECK-INDEXED-RV64-NEXT:  # %bb.196: # %cond.load757
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v20, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 191
+; CHECK-INDEXED-RV64-NEXT:    li a3, 190
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_197: # %else758
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_198
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_712
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_198: # %else762
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_199
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_713
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_199: # %else766
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_200
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_714
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_200: # %else770
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_201
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_715
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_201: # %else774
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_202
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_716
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_202: # %else778
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_203
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_717
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_203: # %else782
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_204
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_718
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_204: # %else786
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_205
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_719
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_205: # %else790
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_206
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_720
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_206: # %else794
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_207
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_721
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_207: # %else798
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_208
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_722
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_208: # %else802
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_209
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_723
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_209: # %else806
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_210
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_724
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_210: # %else810
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_211
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_725
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_211: # %else814
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_212
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_726
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_212: # %else818
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_213
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_727
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_213: # %else822
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_214
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_728
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_214: # %else826
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_215
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_729
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_215: # %else830
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_216
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_730
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_216: # %else834
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_217
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_731
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_217: # %else838
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_218
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_732
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_218: # %else842
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_219
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_733
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_219: # %else846
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_220
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_734
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_220: # %else850
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_221
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_735
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_221: # %else854
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_222
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_736
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_222: # %else858
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_223
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_737
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_223: # %else862
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_224
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_738
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_224: # %else866
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_225
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_739
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_225: # %else870
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_226
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_740
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_226: # %else874
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_227
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_741
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_227: # %else878
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_228
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_742
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_228: # %else882
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_229
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_743
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_229: # %else886
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_230
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_744
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_230: # %else890
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_231
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_745
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_231: # %else894
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_232
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_746
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_232: # %else898
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_233
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_747
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_233: # %else902
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_234
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_748
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_234: # %else906
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_235
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_749
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_235: # %else910
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_236
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_750
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_236: # %else914
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_237
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_751
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_237: # %else918
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_238
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_752
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_238: # %else922
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_239
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_753
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_239: # %else926
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_240
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_754
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_240: # %else930
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_241
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_755
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_241: # %else934
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_242
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_756
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_242: # %else938
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_243
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_757
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_243: # %else942
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_244
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_758
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_244: # %else946
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_245
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_759
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_245: # %else950
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_246
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_760
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_246: # %else954
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_247
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_761
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_247: # %else958
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_248
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_762
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_248: # %else962
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_249
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_763
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_249: # %else966
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_250
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_764
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_250: # %else970
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_251
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_765
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_251: # %else974
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_252
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_766
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_252: # %else978
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_253
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_767
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_253: # %else982
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_254
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_768
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_254: # %else986
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_255
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_769
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_255: # %else990
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_256
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_770
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_256: # %else994
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_257
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_771
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_257: # %else998
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_258
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_772
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_258: # %else1002
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_259
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_773
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_259: # %else1006
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_261
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_260: # %cond.load1009
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 254
+; CHECK-INDEXED-RV64-NEXT:    li a3, 253
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_261: # %else1010
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_263
+; CHECK-INDEXED-RV64-NEXT:  # %bb.262: # %cond.load1013
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v20, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 255
+; CHECK-INDEXED-RV64-NEXT:    li a3, 254
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_263: # %else1014
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_264
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_774
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_264: # %else1018
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_265
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_775
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_265: # %else1022
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_266
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_776
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_266: # %else1026
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_267
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_777
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_267: # %else1030
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_268
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_778
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_268: # %else1034
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_269
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_779
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_269: # %else1038
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_270
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_780
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_270: # %else1042
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_271
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_781
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_271: # %else1046
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_272
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_782
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_272: # %else1050
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_273
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_783
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_273: # %else1054
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_274
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_784
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_274: # %else1058
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_275
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_785
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_275: # %else1062
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_276
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_786
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_276: # %else1066
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_277
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_787
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_277: # %else1070
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_278
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_788
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_278: # %else1074
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_279
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_789
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_279: # %else1078
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_280
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_790
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_280: # %else1082
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_281
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_791
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_281: # %else1086
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_282
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_792
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_282: # %else1090
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_283
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_793
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_283: # %else1094
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_284
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_794
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_284: # %else1098
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_285
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_795
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_285: # %else1102
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_286
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_796
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_286: # %else1106
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_287
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_797
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_287: # %else1110
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_288
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_798
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_288: # %else1114
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_289
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_799
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_289: # %else1118
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_290
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_800
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_290: # %else1122
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_291
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_801
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_291: # %else1126
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_292
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_802
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_292: # %else1130
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_293
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_803
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_293: # %else1134
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_294
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_804
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_294: # %else1138
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_295
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_805
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_295: # %else1142
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_296
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_806
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_296: # %else1146
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_297
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_807
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_297: # %else1150
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_298
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_808
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_298: # %else1154
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_299
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_809
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_299: # %else1158
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_300
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_810
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_300: # %else1162
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_301
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_811
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_301: # %else1166
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_302
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_812
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_302: # %else1170
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_303
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_813
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_303: # %else1174
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_304
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_814
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_304: # %else1178
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_305
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_815
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_305: # %else1182
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_306
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_816
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_306: # %else1186
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_307
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_817
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_307: # %else1190
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_308
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_818
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_308: # %else1194
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_309
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_819
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_309: # %else1198
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_310
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_820
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_310: # %else1202
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_311
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_821
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_311: # %else1206
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_312
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_822
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_312: # %else1210
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_313
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_823
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_313: # %else1214
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_314
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_824
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_314: # %else1218
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_315
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_825
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_315: # %else1222
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_316
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_826
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_316: # %else1226
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_317
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_827
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_317: # %else1230
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_318
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_828
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_318: # %else1234
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_319
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_829
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_319: # %else1238
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_320
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_830
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_320: # %else1242
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_321
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_831
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_321: # %else1246
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_322
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_832
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_322: # %else1250
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_323
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_833
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_323: # %else1254
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_324
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_834
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_324: # %else1258
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_325
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_835
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_325: # %else1262
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_327
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_326: # %cond.load1265
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 318
+; CHECK-INDEXED-RV64-NEXT:    li a3, 317
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_327: # %else1266
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_329
+; CHECK-INDEXED-RV64-NEXT:  # %bb.328: # %cond.load1269
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 319
+; CHECK-INDEXED-RV64-NEXT:    li a3, 318
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_329: # %else1270
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_330
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_836
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_330: # %else1274
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_331
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_837
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_331: # %else1278
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_332
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_838
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_332: # %else1282
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_333
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_839
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_333: # %else1286
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_334
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_840
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_334: # %else1290
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_335
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_841
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_335: # %else1294
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_336
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_842
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_336: # %else1298
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_337
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_843
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_337: # %else1302
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_338
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_844
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_338: # %else1306
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_339
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_845
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_339: # %else1310
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_340
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_846
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_340: # %else1314
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_341
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_847
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_341: # %else1318
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_342
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_848
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_342: # %else1322
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_343
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_849
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_343: # %else1326
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_344
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_850
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_344: # %else1330
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_345
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_851
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_345: # %else1334
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_346
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_852
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_346: # %else1338
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_347
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_853
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_347: # %else1342
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_348
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_854
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_348: # %else1346
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_349
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_855
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_349: # %else1350
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_350
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_856
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_350: # %else1354
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_351
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_857
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_351: # %else1358
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_352
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_858
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_352: # %else1362
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_353
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_859
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_353: # %else1366
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_354
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_860
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_354: # %else1370
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_355
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_861
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_355: # %else1374
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_356
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_862
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_356: # %else1378
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_357
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_863
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_357: # %else1382
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_358
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_864
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_358: # %else1386
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_359
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_865
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_359: # %else1390
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_360
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_866
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_360: # %else1394
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_361
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_867
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_361: # %else1398
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_362
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_868
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_362: # %else1402
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_363
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_869
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_363: # %else1406
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_364
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_870
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_364: # %else1410
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_365
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_871
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_365: # %else1414
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_366
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_872
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_366: # %else1418
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_367
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_873
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_367: # %else1422
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_368
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_874
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_368: # %else1426
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_369
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_875
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_369: # %else1430
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_370
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_876
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_370: # %else1434
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_371
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_877
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_371: # %else1438
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_372
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_878
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_372: # %else1442
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_373
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_879
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_373: # %else1446
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_374
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_880
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_374: # %else1450
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_375
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_881
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_375: # %else1454
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_376
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_882
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_376: # %else1458
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_377
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_883
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_377: # %else1462
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_378
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_884
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_378: # %else1466
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_379
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_885
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_379: # %else1470
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_380
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_886
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_380: # %else1474
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_381
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_887
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_381: # %else1478
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_382
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_888
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_382: # %else1482
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_383
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_889
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_383: # %else1486
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_384
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_890
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_384: # %else1490
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_385
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_891
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_385: # %else1494
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_386
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_892
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_386: # %else1498
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_387
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_893
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_387: # %else1502
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_388
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_894
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_388: # %else1506
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_389
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_895
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_389: # %else1510
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_390
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_896
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_390: # %else1514
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_391
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_897
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_391: # %else1518
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_393
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_392: # %cond.load1521
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 382
+; CHECK-INDEXED-RV64-NEXT:    li a3, 381
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_393: # %else1522
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_395
+; CHECK-INDEXED-RV64-NEXT:  # %bb.394: # %cond.load1525
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 383
+; CHECK-INDEXED-RV64-NEXT:    li a3, 382
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_395: # %else1526
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_396
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_898
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_396: # %else1530
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_397
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_899
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_397: # %else1534
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_398
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_900
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_398: # %else1538
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_399
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_901
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_399: # %else1542
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_400
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_902
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_400: # %else1546
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_401
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_903
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_401: # %else1550
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_402
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_904
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_402: # %else1554
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_403
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_905
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_403: # %else1558
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_404
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_906
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_404: # %else1562
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_405
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_907
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_405: # %else1566
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_406
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_908
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_406: # %else1570
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_407
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_909
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_407: # %else1574
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_408
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_910
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_408: # %else1578
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_409
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_911
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_409: # %else1582
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_410
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_912
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_410: # %else1586
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_411
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_913
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_411: # %else1590
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_412
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_914
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_412: # %else1594
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_413
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_915
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_413: # %else1598
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_414
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_916
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_414: # %else1602
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_415
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_917
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_415: # %else1606
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_416
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_918
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_416: # %else1610
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_417
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_919
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_417: # %else1614
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_418
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_920
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_418: # %else1618
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_419
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_921
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_419: # %else1622
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_420
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_922
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_420: # %else1626
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_421
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_923
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_421: # %else1630
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_422
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_924
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_422: # %else1634
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_423
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_925
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_423: # %else1638
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_424
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_926
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_424: # %else1642
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_425
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_927
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_425: # %else1646
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_426
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_928
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_426: # %else1650
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_427
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_929
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_427: # %else1654
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_428
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_930
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_428: # %else1658
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_429
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_931
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_429: # %else1662
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_430
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_932
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_430: # %else1666
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_431
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_933
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_431: # %else1670
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_432
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_934
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_432: # %else1674
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_433
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_935
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_433: # %else1678
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_434
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_936
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_434: # %else1682
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_435
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_937
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_435: # %else1686
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_436
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_938
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_436: # %else1690
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_437
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_939
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_437: # %else1694
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_438
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_940
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_438: # %else1698
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_439
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_941
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_439: # %else1702
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_440
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_942
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_440: # %else1706
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_441
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_943
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_441: # %else1710
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_442
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_944
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_442: # %else1714
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_443
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_945
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_443: # %else1718
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_444
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_946
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_444: # %else1722
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_445
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_947
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_445: # %else1726
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_446
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_948
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_446: # %else1730
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_447
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_949
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_447: # %else1734
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_448
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_950
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_448: # %else1738
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_449
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_951
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_449: # %else1742
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_450
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_952
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_450: # %else1746
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_451
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_953
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_451: # %else1750
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_452
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_954
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_452: # %else1754
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_453
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_955
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_453: # %else1758
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_454
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_956
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_454: # %else1762
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_455
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_957
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_455: # %else1766
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_456
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_958
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_456: # %else1770
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_457
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_959
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_457: # %else1774
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_459
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_458: # %cond.load1777
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 446
+; CHECK-INDEXED-RV64-NEXT:    li a3, 445
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_459: # %else1778
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_461
+; CHECK-INDEXED-RV64-NEXT:  # %bb.460: # %cond.load1781
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 447
+; CHECK-INDEXED-RV64-NEXT:    li a3, 446
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_461: # %else1782
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_462
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_960
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_462: # %else1786
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_463
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_961
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_463: # %else1790
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_464
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_962
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_464: # %else1794
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_465
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_963
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_465: # %else1798
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_466
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_964
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_466: # %else1802
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_467
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_965
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_467: # %else1806
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_468
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_966
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_468: # %else1810
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_469
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_967
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_469: # %else1814
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_470
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_968
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_470: # %else1818
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_471
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_969
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_471: # %else1822
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_472
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_970
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_472: # %else1826
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_473
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_971
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_473: # %else1830
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_474
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_972
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_474: # %else1834
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_475
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_973
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_475: # %else1838
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_476
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_974
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_476: # %else1842
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_477
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_975
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_477: # %else1846
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_478
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_976
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_478: # %else1850
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_479
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_977
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_479: # %else1854
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_480
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_978
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_480: # %else1858
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_481
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_979
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_481: # %else1862
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_482
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_980
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_482: # %else1866
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_483
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_981
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_483: # %else1870
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_484
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_982
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_484: # %else1874
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_485
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_983
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_485: # %else1878
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_486
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_984
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_486: # %else1882
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_487
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_985
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_487: # %else1886
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_488
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_986
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_488: # %else1890
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_489
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_987
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_489: # %else1894
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_490
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_988
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_490: # %else1898
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_491
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_989
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_491: # %else1902
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_492
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_990
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_492: # %else1906
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_493
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_991
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_493: # %else1910
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_494
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_992
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_494: # %else1914
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_495
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_993
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_495: # %else1918
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_496
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_994
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_496: # %else1922
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_497
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_995
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_497: # %else1926
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_498
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_996
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_498: # %else1930
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_499
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_997
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_499: # %else1934
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_500
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_998
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_500: # %else1938
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_501
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_999
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_501: # %else1942
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_502
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1000
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_502: # %else1946
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_503
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1001
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_503: # %else1950
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_504
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1002
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_504: # %else1954
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_505
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1003
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_505: # %else1958
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_506
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1004
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_506: # %else1962
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_507
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1005
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_507: # %else1966
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_508
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1006
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_508: # %else1970
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_509
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1007
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_509: # %else1974
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_510
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1008
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_510: # %else1978
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_511
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1009
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_511: # %else1982
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_512
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1010
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_512: # %else1986
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_513
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1011
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_513: # %else1990
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_514
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1012
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_514: # %else1994
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_515
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1013
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_515: # %else1998
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_516
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1014
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_516: # %else2002
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_517
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1015
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_517: # %else2006
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_518
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1016
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_518: # %else2010
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_519
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1017
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_519: # %else2014
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_520
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1018
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_520: # %else2018
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_521
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1019
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_521: # %else2022
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_522
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1020
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_522: # %else2026
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_523
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1021
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_523: # %else2030
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_524
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1022
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_524: # %else2034
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_525
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1023
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_525: # %else2038
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_526
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1024
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_526: # %else2042
+; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_527: # %cond.load
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v8, a1
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_528
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_2
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_528: # %cond.load1
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_529
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_3
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_529: # %cond.load5
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_530
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_4
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_530: # %cond.load9
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_531
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_5
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_531: # %cond.load13
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_532
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_6
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_532: # %cond.load17
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_533
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_7
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_533: # %cond.load21
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_534
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_8
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_534: # %cond.load25
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_535
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_9
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_535: # %cond.load29
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_536
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_10
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_536: # %cond.load33
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_537
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_11
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_537: # %cond.load37
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_538
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_12
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_538: # %cond.load41
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_539
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_13
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_539: # %cond.load45
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_540
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_14
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_540: # %cond.load49
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_541
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_15
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_541: # %cond.load53
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_542
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_16
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_542: # %cond.load57
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_543
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_17
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_543: # %cond.load61
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_544
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_18
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_544: # %cond.load65
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_545
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_19
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_545: # %cond.load69
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_546
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_20
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_546: # %cond.load73
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_547
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_21
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_547: # %cond.load77
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_548
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_22
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_548: # %cond.load81
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_549
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_23
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_549: # %cond.load85
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_550
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_24
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_550: # %cond.load89
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_551
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_25
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_551: # %cond.load93
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_552
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_26
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_552: # %cond.load97
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_553
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_27
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_553: # %cond.load101
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_554
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_28
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_554: # %cond.load105
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_555
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_29
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_555: # %cond.load109
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_556
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_30
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_556: # %cond.load113
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_557
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_31
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_557: # %cond.load117
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 30
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_558
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_32
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_558: # %cond.load121
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 32
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_559
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_33
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_559: # %cond.load125
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 33
+; CHECK-INDEXED-RV64-NEXT:    li a3, 32
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_560
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_34
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_560: # %cond.load129
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 34
+; CHECK-INDEXED-RV64-NEXT:    li a3, 33
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_561
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_35
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_561: # %cond.load133
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 35
+; CHECK-INDEXED-RV64-NEXT:    li a3, 34
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_562
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_36
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_562: # %cond.load137
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 36
+; CHECK-INDEXED-RV64-NEXT:    li a3, 35
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_563
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_37
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_563: # %cond.load141
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 37
+; CHECK-INDEXED-RV64-NEXT:    li a3, 36
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_564
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_38
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_564: # %cond.load145
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 38
+; CHECK-INDEXED-RV64-NEXT:    li a3, 37
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_565
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_39
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_565: # %cond.load149
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 39
+; CHECK-INDEXED-RV64-NEXT:    li a3, 38
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_566
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_40
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_566: # %cond.load153
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 40
+; CHECK-INDEXED-RV64-NEXT:    li a3, 39
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_567
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_41
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_567: # %cond.load157
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 41
+; CHECK-INDEXED-RV64-NEXT:    li a3, 40
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_568
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_42
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_568: # %cond.load161
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 42
+; CHECK-INDEXED-RV64-NEXT:    li a3, 41
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_569
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_43
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_569: # %cond.load165
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 43
+; CHECK-INDEXED-RV64-NEXT:    li a3, 42
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_570
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_44
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_570: # %cond.load169
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 44
+; CHECK-INDEXED-RV64-NEXT:    li a3, 43
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_571
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_45
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_571: # %cond.load173
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 45
+; CHECK-INDEXED-RV64-NEXT:    li a3, 44
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_572
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_46
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_572: # %cond.load177
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 46
+; CHECK-INDEXED-RV64-NEXT:    li a3, 45
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_573
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_47
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_573: # %cond.load181
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 47
+; CHECK-INDEXED-RV64-NEXT:    li a3, 46
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_574
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_48
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_574: # %cond.load185
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 48
+; CHECK-INDEXED-RV64-NEXT:    li a3, 47
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_575
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_49
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_575: # %cond.load189
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 49
+; CHECK-INDEXED-RV64-NEXT:    li a3, 48
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_576
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_50
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_576: # %cond.load193
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 50
+; CHECK-INDEXED-RV64-NEXT:    li a3, 49
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_577
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_51
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_577: # %cond.load197
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 51
+; CHECK-INDEXED-RV64-NEXT:    li a3, 50
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_578
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_52
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_578: # %cond.load201
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 52
+; CHECK-INDEXED-RV64-NEXT:    li a3, 51
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_579
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_53
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_579: # %cond.load205
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 53
+; CHECK-INDEXED-RV64-NEXT:    li a3, 52
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_580
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_54
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_580: # %cond.load209
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 54
+; CHECK-INDEXED-RV64-NEXT:    li a3, 53
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_581
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_55
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_581: # %cond.load213
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 55
+; CHECK-INDEXED-RV64-NEXT:    li a3, 54
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_582
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_56
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_582: # %cond.load217
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 56
+; CHECK-INDEXED-RV64-NEXT:    li a3, 55
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_583
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_57
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_583: # %cond.load221
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 57
+; CHECK-INDEXED-RV64-NEXT:    li a3, 56
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_584
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_58
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_584: # %cond.load225
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 58
+; CHECK-INDEXED-RV64-NEXT:    li a3, 57
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_585
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_59
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_585: # %cond.load229
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 59
+; CHECK-INDEXED-RV64-NEXT:    li a3, 58
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_586
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_60
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_586: # %cond.load233
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 60
+; CHECK-INDEXED-RV64-NEXT:    li a3, 59
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_587
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_61
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_587: # %cond.load237
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 61
+; CHECK-INDEXED-RV64-NEXT:    li a3, 60
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1025
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_62
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1025: # %cond.load237
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_63
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_588: # %cond.load249
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 64
+; CHECK-INDEXED-RV64-NEXT:    li a3, 63
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_589
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_67
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_589: # %cond.load253
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 65
+; CHECK-INDEXED-RV64-NEXT:    li a3, 64
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_590
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_68
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_590: # %cond.load257
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 66
+; CHECK-INDEXED-RV64-NEXT:    li a3, 65
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_591
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_69
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_591: # %cond.load261
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 67
+; CHECK-INDEXED-RV64-NEXT:    li a3, 66
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_592
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_70
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_592: # %cond.load265
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 68
+; CHECK-INDEXED-RV64-NEXT:    li a3, 67
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_593
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_71
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_593: # %cond.load269
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 69
+; CHECK-INDEXED-RV64-NEXT:    li a3, 68
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_594
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_72
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_594: # %cond.load273
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 70
+; CHECK-INDEXED-RV64-NEXT:    li a3, 69
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_595
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_73
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_595: # %cond.load277
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 71
+; CHECK-INDEXED-RV64-NEXT:    li a3, 70
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_596
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_74
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_596: # %cond.load281
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 72
+; CHECK-INDEXED-RV64-NEXT:    li a3, 71
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_597
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_75
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_597: # %cond.load285
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 73
+; CHECK-INDEXED-RV64-NEXT:    li a3, 72
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_598
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_76
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_598: # %cond.load289
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 74
+; CHECK-INDEXED-RV64-NEXT:    li a3, 73
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_599
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_77
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_599: # %cond.load293
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 75
+; CHECK-INDEXED-RV64-NEXT:    li a3, 74
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_600
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_78
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_600: # %cond.load297
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 76
+; CHECK-INDEXED-RV64-NEXT:    li a3, 75
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_601
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_79
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_601: # %cond.load301
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 77
+; CHECK-INDEXED-RV64-NEXT:    li a3, 76
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_602
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_80
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_602: # %cond.load305
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 78
+; CHECK-INDEXED-RV64-NEXT:    li a3, 77
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_603
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_81
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_603: # %cond.load309
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 79
+; CHECK-INDEXED-RV64-NEXT:    li a3, 78
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_604
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_82
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_604: # %cond.load313
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 80
+; CHECK-INDEXED-RV64-NEXT:    li a3, 79
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_605
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_83
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_605: # %cond.load317
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 81
+; CHECK-INDEXED-RV64-NEXT:    li a3, 80
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_606
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_84
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_606: # %cond.load321
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 82
+; CHECK-INDEXED-RV64-NEXT:    li a3, 81
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_607
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_85
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_607: # %cond.load325
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 83
+; CHECK-INDEXED-RV64-NEXT:    li a3, 82
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_608
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_86
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_608: # %cond.load329
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 84
+; CHECK-INDEXED-RV64-NEXT:    li a3, 83
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_609
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_87
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_609: # %cond.load333
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 85
+; CHECK-INDEXED-RV64-NEXT:    li a3, 84
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_610
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_88
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_610: # %cond.load337
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 86
+; CHECK-INDEXED-RV64-NEXT:    li a3, 85
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_611
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_89
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_611: # %cond.load341
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 87
+; CHECK-INDEXED-RV64-NEXT:    li a3, 86
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_612
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_90
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_612: # %cond.load345
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 88
+; CHECK-INDEXED-RV64-NEXT:    li a3, 87
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_613
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_91
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_613: # %cond.load349
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 89
+; CHECK-INDEXED-RV64-NEXT:    li a3, 88
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_614
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_92
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_614: # %cond.load353
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 90
+; CHECK-INDEXED-RV64-NEXT:    li a3, 89
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_615
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_93
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_615: # %cond.load357
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 91
+; CHECK-INDEXED-RV64-NEXT:    li a3, 90
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_616
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_94
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_616: # %cond.load361
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 92
+; CHECK-INDEXED-RV64-NEXT:    li a3, 91
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_617
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_95
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_617: # %cond.load365
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 93
+; CHECK-INDEXED-RV64-NEXT:    li a3, 92
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_618
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_96
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_618: # %cond.load369
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 94
+; CHECK-INDEXED-RV64-NEXT:    li a3, 93
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_619
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_97
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_619: # %cond.load373
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 95
+; CHECK-INDEXED-RV64-NEXT:    li a3, 94
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_620
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_98
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_620: # %cond.load377
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 96
+; CHECK-INDEXED-RV64-NEXT:    li a3, 95
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_621
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_99
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_621: # %cond.load381
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 97
+; CHECK-INDEXED-RV64-NEXT:    li a3, 96
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_622
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_100
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_622: # %cond.load385
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 98
+; CHECK-INDEXED-RV64-NEXT:    li a3, 97
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_623
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_101
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_623: # %cond.load389
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 99
+; CHECK-INDEXED-RV64-NEXT:    li a3, 98
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_624
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_102
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_624: # %cond.load393
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 100
+; CHECK-INDEXED-RV64-NEXT:    li a3, 99
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_625
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_103
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_625: # %cond.load397
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 101
+; CHECK-INDEXED-RV64-NEXT:    li a3, 100
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_626
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_104
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_626: # %cond.load401
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 102
+; CHECK-INDEXED-RV64-NEXT:    li a3, 101
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_627
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_105
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_627: # %cond.load405
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 103
+; CHECK-INDEXED-RV64-NEXT:    li a3, 102
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_628
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_106
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_628: # %cond.load409
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 104
+; CHECK-INDEXED-RV64-NEXT:    li a3, 103
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_629
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_107
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_629: # %cond.load413
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 105
+; CHECK-INDEXED-RV64-NEXT:    li a3, 104
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_630
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_108
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_630: # %cond.load417
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 106
+; CHECK-INDEXED-RV64-NEXT:    li a3, 105
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_631
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_109
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_631: # %cond.load421
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 107
+; CHECK-INDEXED-RV64-NEXT:    li a3, 106
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_632
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_110
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_632: # %cond.load425
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 108
+; CHECK-INDEXED-RV64-NEXT:    li a3, 107
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_633
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_111
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_633: # %cond.load429
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 109
+; CHECK-INDEXED-RV64-NEXT:    li a3, 108
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_634
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_112
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_634: # %cond.load433
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 110
+; CHECK-INDEXED-RV64-NEXT:    li a3, 109
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_635
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_113
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_635: # %cond.load437
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 111
+; CHECK-INDEXED-RV64-NEXT:    li a3, 110
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_636
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_114
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_636: # %cond.load441
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 112
+; CHECK-INDEXED-RV64-NEXT:    li a3, 111
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_637
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_115
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_637: # %cond.load445
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 113
+; CHECK-INDEXED-RV64-NEXT:    li a3, 112
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_638
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_116
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_638: # %cond.load449
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 114
+; CHECK-INDEXED-RV64-NEXT:    li a3, 113
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_639
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_117
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_639: # %cond.load453
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 115
+; CHECK-INDEXED-RV64-NEXT:    li a3, 114
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_640
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_118
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_640: # %cond.load457
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 116
+; CHECK-INDEXED-RV64-NEXT:    li a3, 115
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_641
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_119
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_641: # %cond.load461
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 117
+; CHECK-INDEXED-RV64-NEXT:    li a3, 116
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_642
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_120
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_642: # %cond.load465
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 118
+; CHECK-INDEXED-RV64-NEXT:    li a3, 117
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_643
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_121
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_643: # %cond.load469
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 119
+; CHECK-INDEXED-RV64-NEXT:    li a3, 118
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_644
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_122
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_644: # %cond.load473
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 120
+; CHECK-INDEXED-RV64-NEXT:    li a3, 119
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_645
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_123
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_645: # %cond.load477
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 121
+; CHECK-INDEXED-RV64-NEXT:    li a3, 120
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_646
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_124
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_646: # %cond.load481
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 122
+; CHECK-INDEXED-RV64-NEXT:    li a3, 121
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_647
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_125
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_647: # %cond.load485
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 123
+; CHECK-INDEXED-RV64-NEXT:    li a3, 122
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_648
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_126
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_648: # %cond.load489
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 124
+; CHECK-INDEXED-RV64-NEXT:    li a3, 123
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_649
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_127
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_649: # %cond.load493
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 125
+; CHECK-INDEXED-RV64-NEXT:    li a3, 124
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1026
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_128
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1026: # %cond.load493
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_129
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_650: # %cond.load505
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 128
+; CHECK-INDEXED-RV64-NEXT:    li a3, 127
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_651
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_133
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_651: # %cond.load509
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 129
+; CHECK-INDEXED-RV64-NEXT:    li a3, 128
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_652
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_134
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_652: # %cond.load513
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 130
+; CHECK-INDEXED-RV64-NEXT:    li a3, 129
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_653
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_135
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_653: # %cond.load517
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 131
+; CHECK-INDEXED-RV64-NEXT:    li a3, 130
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_654
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_136
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_654: # %cond.load521
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 132
+; CHECK-INDEXED-RV64-NEXT:    li a3, 131
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_655
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_137
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_655: # %cond.load525
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 133
+; CHECK-INDEXED-RV64-NEXT:    li a3, 132
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_656
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_138
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_656: # %cond.load529
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 134
+; CHECK-INDEXED-RV64-NEXT:    li a3, 133
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_657
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_139
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_657: # %cond.load533
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 135
+; CHECK-INDEXED-RV64-NEXT:    li a3, 134
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_658
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_140
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_658: # %cond.load537
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 136
+; CHECK-INDEXED-RV64-NEXT:    li a3, 135
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_659
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_141
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_659: # %cond.load541
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 137
+; CHECK-INDEXED-RV64-NEXT:    li a3, 136
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_660
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_142
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_660: # %cond.load545
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 138
+; CHECK-INDEXED-RV64-NEXT:    li a3, 137
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_661
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_143
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_661: # %cond.load549
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 139
+; CHECK-INDEXED-RV64-NEXT:    li a3, 138
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_662
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_144
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_662: # %cond.load553
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 140
+; CHECK-INDEXED-RV64-NEXT:    li a3, 139
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_663
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_145
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_663: # %cond.load557
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 141
+; CHECK-INDEXED-RV64-NEXT:    li a3, 140
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_664
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_146
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_664: # %cond.load561
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 142
+; CHECK-INDEXED-RV64-NEXT:    li a3, 141
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_665
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_147
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_665: # %cond.load565
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 143
+; CHECK-INDEXED-RV64-NEXT:    li a3, 142
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_666
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_148
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_666: # %cond.load569
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 144
+; CHECK-INDEXED-RV64-NEXT:    li a3, 143
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_667
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_149
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_667: # %cond.load573
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 145
+; CHECK-INDEXED-RV64-NEXT:    li a3, 144
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_668
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_150
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_668: # %cond.load577
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 146
+; CHECK-INDEXED-RV64-NEXT:    li a3, 145
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_669
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_151
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_669: # %cond.load581
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 147
+; CHECK-INDEXED-RV64-NEXT:    li a3, 146
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_670
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_152
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_670: # %cond.load585
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 148
+; CHECK-INDEXED-RV64-NEXT:    li a3, 147
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_671
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_153
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_671: # %cond.load589
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 149
+; CHECK-INDEXED-RV64-NEXT:    li a3, 148
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_672
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_154
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_672: # %cond.load593
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 150
+; CHECK-INDEXED-RV64-NEXT:    li a3, 149
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_673
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_155
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_673: # %cond.load597
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 151
+; CHECK-INDEXED-RV64-NEXT:    li a3, 150
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_674
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_156
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_674: # %cond.load601
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 152
+; CHECK-INDEXED-RV64-NEXT:    li a3, 151
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_675
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_157
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_675: # %cond.load605
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 153
+; CHECK-INDEXED-RV64-NEXT:    li a3, 152
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_676
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_158
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_676: # %cond.load609
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 154
+; CHECK-INDEXED-RV64-NEXT:    li a3, 153
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_677
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_159
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_677: # %cond.load613
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 155
+; CHECK-INDEXED-RV64-NEXT:    li a3, 154
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_678
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_160
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_678: # %cond.load617
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 156
+; CHECK-INDEXED-RV64-NEXT:    li a3, 155
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_679
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_161
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_679: # %cond.load621
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 157
+; CHECK-INDEXED-RV64-NEXT:    li a3, 156
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_680
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_162
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_680: # %cond.load625
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 158
+; CHECK-INDEXED-RV64-NEXT:    li a3, 157
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_681
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_163
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_681: # %cond.load629
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 159
+; CHECK-INDEXED-RV64-NEXT:    li a3, 158
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_682
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_164
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_682: # %cond.load633
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 160
+; CHECK-INDEXED-RV64-NEXT:    li a3, 159
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_683
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_165
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_683: # %cond.load637
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 161
+; CHECK-INDEXED-RV64-NEXT:    li a3, 160
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_684
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_166
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_684: # %cond.load641
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 162
+; CHECK-INDEXED-RV64-NEXT:    li a3, 161
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_685
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_167
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_685: # %cond.load645
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 163
+; CHECK-INDEXED-RV64-NEXT:    li a3, 162
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_686
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_168
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_686: # %cond.load649
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 164
+; CHECK-INDEXED-RV64-NEXT:    li a3, 163
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_687
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_169
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_687: # %cond.load653
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 165
+; CHECK-INDEXED-RV64-NEXT:    li a3, 164
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_688
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_170
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_688: # %cond.load657
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 166
+; CHECK-INDEXED-RV64-NEXT:    li a3, 165
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_689
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_171
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_689: # %cond.load661
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 167
+; CHECK-INDEXED-RV64-NEXT:    li a3, 166
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_690
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_172
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_690: # %cond.load665
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 168
+; CHECK-INDEXED-RV64-NEXT:    li a3, 167
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_691
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_173
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_691: # %cond.load669
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 169
+; CHECK-INDEXED-RV64-NEXT:    li a3, 168
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_692
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_174
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_692: # %cond.load673
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 170
+; CHECK-INDEXED-RV64-NEXT:    li a3, 169
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_693
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_175
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_693: # %cond.load677
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 171
+; CHECK-INDEXED-RV64-NEXT:    li a3, 170
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_694
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_176
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_694: # %cond.load681
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 172
+; CHECK-INDEXED-RV64-NEXT:    li a3, 171
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_695
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_177
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_695: # %cond.load685
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 173
+; CHECK-INDEXED-RV64-NEXT:    li a3, 172
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_696
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_178
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_696: # %cond.load689
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 174
+; CHECK-INDEXED-RV64-NEXT:    li a3, 173
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_697
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_179
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_697: # %cond.load693
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 175
+; CHECK-INDEXED-RV64-NEXT:    li a3, 174
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_698
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_180
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_698: # %cond.load697
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 176
+; CHECK-INDEXED-RV64-NEXT:    li a3, 175
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_699
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_181
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_699: # %cond.load701
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 177
+; CHECK-INDEXED-RV64-NEXT:    li a3, 176
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_700
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_182
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_700: # %cond.load705
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 178
+; CHECK-INDEXED-RV64-NEXT:    li a3, 177
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_701
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_183
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_701: # %cond.load709
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 179
+; CHECK-INDEXED-RV64-NEXT:    li a3, 178
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_702
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_184
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_702: # %cond.load713
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 180
+; CHECK-INDEXED-RV64-NEXT:    li a3, 179
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_703
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_185
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_703: # %cond.load717
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 181
+; CHECK-INDEXED-RV64-NEXT:    li a3, 180
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_704
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_186
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_704: # %cond.load721
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 182
+; CHECK-INDEXED-RV64-NEXT:    li a3, 181
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_705
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_187
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_705: # %cond.load725
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 183
+; CHECK-INDEXED-RV64-NEXT:    li a3, 182
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_706
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_188
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_706: # %cond.load729
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 184
+; CHECK-INDEXED-RV64-NEXT:    li a3, 183
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_707
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_189
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_707: # %cond.load733
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 185
+; CHECK-INDEXED-RV64-NEXT:    li a3, 184
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_708
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_190
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_708: # %cond.load737
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 186
+; CHECK-INDEXED-RV64-NEXT:    li a3, 185
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_709
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_191
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_709: # %cond.load741
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 187
+; CHECK-INDEXED-RV64-NEXT:    li a3, 186
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_710
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_192
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_710: # %cond.load745
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 188
+; CHECK-INDEXED-RV64-NEXT:    li a3, 187
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_711
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_193
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_711: # %cond.load749
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 189
+; CHECK-INDEXED-RV64-NEXT:    li a3, 188
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1027
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_194
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1027: # %cond.load749
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_195
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_712: # %cond.load761
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 192
+; CHECK-INDEXED-RV64-NEXT:    li a3, 191
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_713
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_199
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_713: # %cond.load765
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 193
+; CHECK-INDEXED-RV64-NEXT:    li a3, 192
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_714
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_200
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_714: # %cond.load769
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 194
+; CHECK-INDEXED-RV64-NEXT:    li a3, 193
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_715
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_201
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_715: # %cond.load773
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 195
+; CHECK-INDEXED-RV64-NEXT:    li a3, 194
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_716
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_202
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_716: # %cond.load777
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 196
+; CHECK-INDEXED-RV64-NEXT:    li a3, 195
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_717
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_203
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_717: # %cond.load781
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 197
+; CHECK-INDEXED-RV64-NEXT:    li a3, 196
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_718
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_204
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_718: # %cond.load785
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 198
+; CHECK-INDEXED-RV64-NEXT:    li a3, 197
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_719
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_205
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_719: # %cond.load789
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 199
+; CHECK-INDEXED-RV64-NEXT:    li a3, 198
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_720
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_206
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_720: # %cond.load793
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 200
+; CHECK-INDEXED-RV64-NEXT:    li a3, 199
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_721
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_207
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_721: # %cond.load797
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 201
+; CHECK-INDEXED-RV64-NEXT:    li a3, 200
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_722
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_208
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_722: # %cond.load801
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 202
+; CHECK-INDEXED-RV64-NEXT:    li a3, 201
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_723
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_209
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_723: # %cond.load805
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 203
+; CHECK-INDEXED-RV64-NEXT:    li a3, 202
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_724
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_210
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_724: # %cond.load809
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 204
+; CHECK-INDEXED-RV64-NEXT:    li a3, 203
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_725
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_211
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_725: # %cond.load813
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 205
+; CHECK-INDEXED-RV64-NEXT:    li a3, 204
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_726
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_212
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_726: # %cond.load817
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 206
+; CHECK-INDEXED-RV64-NEXT:    li a3, 205
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_727
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_213
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_727: # %cond.load821
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 207
+; CHECK-INDEXED-RV64-NEXT:    li a3, 206
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_728
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_214
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_728: # %cond.load825
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 208
+; CHECK-INDEXED-RV64-NEXT:    li a3, 207
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_729
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_215
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_729: # %cond.load829
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 209
+; CHECK-INDEXED-RV64-NEXT:    li a3, 208
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_730
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_216
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_730: # %cond.load833
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 210
+; CHECK-INDEXED-RV64-NEXT:    li a3, 209
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_731
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_217
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_731: # %cond.load837
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 211
+; CHECK-INDEXED-RV64-NEXT:    li a3, 210
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_732
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_218
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_732: # %cond.load841
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 212
+; CHECK-INDEXED-RV64-NEXT:    li a3, 211
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_733
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_219
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_733: # %cond.load845
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 213
+; CHECK-INDEXED-RV64-NEXT:    li a3, 212
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_734
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_220
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_734: # %cond.load849
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 214
+; CHECK-INDEXED-RV64-NEXT:    li a3, 213
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_735
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_221
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_735: # %cond.load853
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 215
+; CHECK-INDEXED-RV64-NEXT:    li a3, 214
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_736
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_222
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_736: # %cond.load857
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 216
+; CHECK-INDEXED-RV64-NEXT:    li a3, 215
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_737
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_223
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_737: # %cond.load861
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 217
+; CHECK-INDEXED-RV64-NEXT:    li a3, 216
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_738
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_224
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_738: # %cond.load865
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 218
+; CHECK-INDEXED-RV64-NEXT:    li a3, 217
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_739
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_225
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_739: # %cond.load869
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 219
+; CHECK-INDEXED-RV64-NEXT:    li a3, 218
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_740
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_226
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_740: # %cond.load873
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 220
+; CHECK-INDEXED-RV64-NEXT:    li a3, 219
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_741
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_227
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_741: # %cond.load877
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 221
+; CHECK-INDEXED-RV64-NEXT:    li a3, 220
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_742
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_228
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_742: # %cond.load881
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 222
+; CHECK-INDEXED-RV64-NEXT:    li a3, 221
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_743
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_229
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_743: # %cond.load885
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 223
+; CHECK-INDEXED-RV64-NEXT:    li a3, 222
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_744
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_230
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_744: # %cond.load889
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 224
+; CHECK-INDEXED-RV64-NEXT:    li a3, 223
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_745
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_231
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_745: # %cond.load893
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 225
+; CHECK-INDEXED-RV64-NEXT:    li a3, 224
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_746
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_232
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_746: # %cond.load897
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 226
+; CHECK-INDEXED-RV64-NEXT:    li a3, 225
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_747
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_233
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_747: # %cond.load901
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 227
+; CHECK-INDEXED-RV64-NEXT:    li a3, 226
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_748
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_234
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_748: # %cond.load905
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 228
+; CHECK-INDEXED-RV64-NEXT:    li a3, 227
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_749
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_235
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_749: # %cond.load909
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 229
+; CHECK-INDEXED-RV64-NEXT:    li a3, 228
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_750
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_236
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_750: # %cond.load913
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 230
+; CHECK-INDEXED-RV64-NEXT:    li a3, 229
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_751
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_237
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_751: # %cond.load917
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 231
+; CHECK-INDEXED-RV64-NEXT:    li a3, 230
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_752
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_238
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_752: # %cond.load921
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 232
+; CHECK-INDEXED-RV64-NEXT:    li a3, 231
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_753
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_239
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_753: # %cond.load925
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 233
+; CHECK-INDEXED-RV64-NEXT:    li a3, 232
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_754
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_240
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_754: # %cond.load929
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 234
+; CHECK-INDEXED-RV64-NEXT:    li a3, 233
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_755
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_241
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_755: # %cond.load933
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 235
+; CHECK-INDEXED-RV64-NEXT:    li a3, 234
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_756
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_242
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_756: # %cond.load937
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 236
+; CHECK-INDEXED-RV64-NEXT:    li a3, 235
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_757
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_243
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_757: # %cond.load941
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 237
+; CHECK-INDEXED-RV64-NEXT:    li a3, 236
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_758
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_244
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_758: # %cond.load945
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 238
+; CHECK-INDEXED-RV64-NEXT:    li a3, 237
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_759
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_245
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_759: # %cond.load949
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 239
+; CHECK-INDEXED-RV64-NEXT:    li a3, 238
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_760
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_246
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_760: # %cond.load953
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 240
+; CHECK-INDEXED-RV64-NEXT:    li a3, 239
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_761
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_247
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_761: # %cond.load957
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 241
+; CHECK-INDEXED-RV64-NEXT:    li a3, 240
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_762
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_248
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_762: # %cond.load961
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 242
+; CHECK-INDEXED-RV64-NEXT:    li a3, 241
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_763
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_249
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_763: # %cond.load965
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 243
+; CHECK-INDEXED-RV64-NEXT:    li a3, 242
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_764
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_250
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_764: # %cond.load969
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 244
+; CHECK-INDEXED-RV64-NEXT:    li a3, 243
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_765
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_251
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_765: # %cond.load973
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 245
+; CHECK-INDEXED-RV64-NEXT:    li a3, 244
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_766
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_252
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_766: # %cond.load977
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 246
+; CHECK-INDEXED-RV64-NEXT:    li a3, 245
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_767
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_253
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_767: # %cond.load981
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 247
+; CHECK-INDEXED-RV64-NEXT:    li a3, 246
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_768
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_254
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_768: # %cond.load985
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 248
+; CHECK-INDEXED-RV64-NEXT:    li a3, 247
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_769
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_255
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_769: # %cond.load989
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 249
+; CHECK-INDEXED-RV64-NEXT:    li a3, 248
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_770
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_256
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_770: # %cond.load993
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 250
+; CHECK-INDEXED-RV64-NEXT:    li a3, 249
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_771
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_257
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_771: # %cond.load997
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 251
+; CHECK-INDEXED-RV64-NEXT:    li a3, 250
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_772
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_258
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_772: # %cond.load1001
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 252
+; CHECK-INDEXED-RV64-NEXT:    li a3, 251
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_773
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_259
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_773: # %cond.load1005
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a2, 253
+; CHECK-INDEXED-RV64-NEXT:    li a3, 252
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1028
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_260
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1028: # %cond.load1005
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_261
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_774: # %cond.load1017
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    li a1, 256
+; CHECK-INDEXED-RV64-NEXT:    li a3, 255
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_775
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_265
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_775: # %cond.load1021
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 257
+; CHECK-INDEXED-RV64-NEXT:    li a3, 256
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_776
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_266
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_776: # %cond.load1025
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 258
+; CHECK-INDEXED-RV64-NEXT:    li a3, 257
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_777
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_267
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_777: # %cond.load1029
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 259
+; CHECK-INDEXED-RV64-NEXT:    li a3, 258
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_778
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_268
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_778: # %cond.load1033
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 260
+; CHECK-INDEXED-RV64-NEXT:    li a3, 259
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_779
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_269
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_779: # %cond.load1037
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 261
+; CHECK-INDEXED-RV64-NEXT:    li a3, 260
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_780
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_270
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_780: # %cond.load1041
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 262
+; CHECK-INDEXED-RV64-NEXT:    li a3, 261
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_781
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_271
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_781: # %cond.load1045
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 263
+; CHECK-INDEXED-RV64-NEXT:    li a3, 262
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_782
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_272
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_782: # %cond.load1049
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 264
+; CHECK-INDEXED-RV64-NEXT:    li a3, 263
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_783
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_273
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_783: # %cond.load1053
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 265
+; CHECK-INDEXED-RV64-NEXT:    li a3, 264
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_784
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_274
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_784: # %cond.load1057
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 266
+; CHECK-INDEXED-RV64-NEXT:    li a3, 265
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_785
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_275
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_785: # %cond.load1061
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 267
+; CHECK-INDEXED-RV64-NEXT:    li a3, 266
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_786
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_276
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_786: # %cond.load1065
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 268
+; CHECK-INDEXED-RV64-NEXT:    li a3, 267
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_787
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_277
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_787: # %cond.load1069
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 269
+; CHECK-INDEXED-RV64-NEXT:    li a3, 268
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_788
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_278
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_788: # %cond.load1073
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 270
+; CHECK-INDEXED-RV64-NEXT:    li a3, 269
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_789
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_279
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_789: # %cond.load1077
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 271
+; CHECK-INDEXED-RV64-NEXT:    li a3, 270
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_790
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_280
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_790: # %cond.load1081
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 272
+; CHECK-INDEXED-RV64-NEXT:    li a3, 271
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_791
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_281
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_791: # %cond.load1085
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 273
+; CHECK-INDEXED-RV64-NEXT:    li a3, 272
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_792
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_282
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_792: # %cond.load1089
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 274
+; CHECK-INDEXED-RV64-NEXT:    li a3, 273
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_793
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_283
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_793: # %cond.load1093
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 275
+; CHECK-INDEXED-RV64-NEXT:    li a3, 274
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_794
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_284
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_794: # %cond.load1097
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 276
+; CHECK-INDEXED-RV64-NEXT:    li a3, 275
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_795
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_285
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_795: # %cond.load1101
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 277
+; CHECK-INDEXED-RV64-NEXT:    li a3, 276
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_796
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_286
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_796: # %cond.load1105
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 278
+; CHECK-INDEXED-RV64-NEXT:    li a3, 277
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_797
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_287
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_797: # %cond.load1109
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 279
+; CHECK-INDEXED-RV64-NEXT:    li a3, 278
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_798
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_288
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_798: # %cond.load1113
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 280
+; CHECK-INDEXED-RV64-NEXT:    li a3, 279
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_799
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_289
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_799: # %cond.load1117
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 281
+; CHECK-INDEXED-RV64-NEXT:    li a3, 280
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_800
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_290
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_800: # %cond.load1121
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 282
+; CHECK-INDEXED-RV64-NEXT:    li a3, 281
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_801
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_291
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_801: # %cond.load1125
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 283
+; CHECK-INDEXED-RV64-NEXT:    li a3, 282
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_802
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_292
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_802: # %cond.load1129
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 284
+; CHECK-INDEXED-RV64-NEXT:    li a3, 283
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_803
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_293
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_803: # %cond.load1133
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 285
+; CHECK-INDEXED-RV64-NEXT:    li a3, 284
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_804
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_294
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_804: # %cond.load1137
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 286
+; CHECK-INDEXED-RV64-NEXT:    li a3, 285
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_805
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_295
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_805: # %cond.load1141
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 287
+; CHECK-INDEXED-RV64-NEXT:    li a3, 286
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_806
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_296
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_806: # %cond.load1145
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 288
+; CHECK-INDEXED-RV64-NEXT:    li a3, 287
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_807
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_297
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_807: # %cond.load1149
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 289
+; CHECK-INDEXED-RV64-NEXT:    li a3, 288
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_808
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_298
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_808: # %cond.load1153
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 290
+; CHECK-INDEXED-RV64-NEXT:    li a3, 289
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_809
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_299
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_809: # %cond.load1157
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 291
+; CHECK-INDEXED-RV64-NEXT:    li a3, 290
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_810
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_300
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_810: # %cond.load1161
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 292
+; CHECK-INDEXED-RV64-NEXT:    li a3, 291
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_811
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_301
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_811: # %cond.load1165
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 293
+; CHECK-INDEXED-RV64-NEXT:    li a3, 292
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_812
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_302
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_812: # %cond.load1169
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 294
+; CHECK-INDEXED-RV64-NEXT:    li a3, 293
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_813
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_303
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_813: # %cond.load1173
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 295
+; CHECK-INDEXED-RV64-NEXT:    li a3, 294
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_814
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_304
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_814: # %cond.load1177
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 296
+; CHECK-INDEXED-RV64-NEXT:    li a3, 295
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_815
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_305
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_815: # %cond.load1181
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 297
+; CHECK-INDEXED-RV64-NEXT:    li a3, 296
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_816
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_306
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_816: # %cond.load1185
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 298
+; CHECK-INDEXED-RV64-NEXT:    li a3, 297
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_817
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_307
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_817: # %cond.load1189
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 299
+; CHECK-INDEXED-RV64-NEXT:    li a3, 298
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_818
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_308
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_818: # %cond.load1193
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 300
+; CHECK-INDEXED-RV64-NEXT:    li a3, 299
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_819
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_309
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_819: # %cond.load1197
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 301
+; CHECK-INDEXED-RV64-NEXT:    li a3, 300
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_820
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_310
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_820: # %cond.load1201
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 302
+; CHECK-INDEXED-RV64-NEXT:    li a3, 301
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_821
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_311
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_821: # %cond.load1205
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 303
+; CHECK-INDEXED-RV64-NEXT:    li a3, 302
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_822
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_312
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_822: # %cond.load1209
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 304
+; CHECK-INDEXED-RV64-NEXT:    li a3, 303
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_823
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_313
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_823: # %cond.load1213
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 305
+; CHECK-INDEXED-RV64-NEXT:    li a3, 304
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_824
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_314
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_824: # %cond.load1217
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 306
+; CHECK-INDEXED-RV64-NEXT:    li a3, 305
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_825
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_315
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_825: # %cond.load1221
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 307
+; CHECK-INDEXED-RV64-NEXT:    li a3, 306
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_826
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_316
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_826: # %cond.load1225
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 308
+; CHECK-INDEXED-RV64-NEXT:    li a3, 307
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_827
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_317
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_827: # %cond.load1229
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 309
+; CHECK-INDEXED-RV64-NEXT:    li a3, 308
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_828
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_318
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_828: # %cond.load1233
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 310
+; CHECK-INDEXED-RV64-NEXT:    li a3, 309
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_829
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_319
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_829: # %cond.load1237
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 311
+; CHECK-INDEXED-RV64-NEXT:    li a3, 310
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_830
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_320
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_830: # %cond.load1241
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 312
+; CHECK-INDEXED-RV64-NEXT:    li a3, 311
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_831
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_321
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_831: # %cond.load1245
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 313
+; CHECK-INDEXED-RV64-NEXT:    li a3, 312
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_832
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_322
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_832: # %cond.load1249
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 314
+; CHECK-INDEXED-RV64-NEXT:    li a3, 313
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_833
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_323
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_833: # %cond.load1253
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 315
+; CHECK-INDEXED-RV64-NEXT:    li a3, 314
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_834
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_324
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_834: # %cond.load1257
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 316
+; CHECK-INDEXED-RV64-NEXT:    li a3, 315
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_835
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_325
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_835: # %cond.load1261
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 317
+; CHECK-INDEXED-RV64-NEXT:    li a3, 316
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1029
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_326
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1029: # %cond.load1261
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_327
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_836: # %cond.load1273
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 320
+; CHECK-INDEXED-RV64-NEXT:    li a3, 319
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_837
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_331
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_837: # %cond.load1277
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 321
+; CHECK-INDEXED-RV64-NEXT:    li a3, 320
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_838
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_332
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_838: # %cond.load1281
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 322
+; CHECK-INDEXED-RV64-NEXT:    li a3, 321
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_839
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_333
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_839: # %cond.load1285
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 323
+; CHECK-INDEXED-RV64-NEXT:    li a3, 322
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_840
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_334
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_840: # %cond.load1289
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 324
+; CHECK-INDEXED-RV64-NEXT:    li a3, 323
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_841
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_335
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_841: # %cond.load1293
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 325
+; CHECK-INDEXED-RV64-NEXT:    li a3, 324
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_842
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_336
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_842: # %cond.load1297
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 326
+; CHECK-INDEXED-RV64-NEXT:    li a3, 325
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_843
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_337
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_843: # %cond.load1301
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 327
+; CHECK-INDEXED-RV64-NEXT:    li a3, 326
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_844
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_338
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_844: # %cond.load1305
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 328
+; CHECK-INDEXED-RV64-NEXT:    li a3, 327
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_845
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_339
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_845: # %cond.load1309
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 329
+; CHECK-INDEXED-RV64-NEXT:    li a3, 328
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_846
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_340
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_846: # %cond.load1313
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 330
+; CHECK-INDEXED-RV64-NEXT:    li a3, 329
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_847
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_341
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_847: # %cond.load1317
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 331
+; CHECK-INDEXED-RV64-NEXT:    li a3, 330
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_848
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_342
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_848: # %cond.load1321
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 332
+; CHECK-INDEXED-RV64-NEXT:    li a3, 331
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_849
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_343
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_849: # %cond.load1325
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 333
+; CHECK-INDEXED-RV64-NEXT:    li a3, 332
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_850
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_344
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_850: # %cond.load1329
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 334
+; CHECK-INDEXED-RV64-NEXT:    li a3, 333
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_851
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_345
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_851: # %cond.load1333
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 335
+; CHECK-INDEXED-RV64-NEXT:    li a3, 334
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_852
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_346
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_852: # %cond.load1337
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 336
+; CHECK-INDEXED-RV64-NEXT:    li a3, 335
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_853
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_347
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_853: # %cond.load1341
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 337
+; CHECK-INDEXED-RV64-NEXT:    li a3, 336
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_854
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_348
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_854: # %cond.load1345
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 338
+; CHECK-INDEXED-RV64-NEXT:    li a3, 337
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_855
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_349
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_855: # %cond.load1349
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 339
+; CHECK-INDEXED-RV64-NEXT:    li a3, 338
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_856
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_350
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_856: # %cond.load1353
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 340
+; CHECK-INDEXED-RV64-NEXT:    li a3, 339
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_857
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_351
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_857: # %cond.load1357
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 341
+; CHECK-INDEXED-RV64-NEXT:    li a3, 340
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_858
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_352
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_858: # %cond.load1361
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 342
+; CHECK-INDEXED-RV64-NEXT:    li a3, 341
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_859
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_353
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_859: # %cond.load1365
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 343
+; CHECK-INDEXED-RV64-NEXT:    li a3, 342
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_860
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_354
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_860: # %cond.load1369
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 344
+; CHECK-INDEXED-RV64-NEXT:    li a3, 343
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_861
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_355
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_861: # %cond.load1373
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 345
+; CHECK-INDEXED-RV64-NEXT:    li a3, 344
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_862
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_356
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_862: # %cond.load1377
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 346
+; CHECK-INDEXED-RV64-NEXT:    li a3, 345
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_863
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_357
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_863: # %cond.load1381
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 347
+; CHECK-INDEXED-RV64-NEXT:    li a3, 346
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_864
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_358
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_864: # %cond.load1385
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 348
+; CHECK-INDEXED-RV64-NEXT:    li a3, 347
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_865
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_359
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_865: # %cond.load1389
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 349
+; CHECK-INDEXED-RV64-NEXT:    li a3, 348
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_866
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_360
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_866: # %cond.load1393
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 350
+; CHECK-INDEXED-RV64-NEXT:    li a3, 349
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_867
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_361
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_867: # %cond.load1397
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 351
+; CHECK-INDEXED-RV64-NEXT:    li a3, 350
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_868
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_362
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_868: # %cond.load1401
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 352
+; CHECK-INDEXED-RV64-NEXT:    li a3, 351
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_869
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_363
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_869: # %cond.load1405
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 353
+; CHECK-INDEXED-RV64-NEXT:    li a3, 352
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_870
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_364
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_870: # %cond.load1409
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 354
+; CHECK-INDEXED-RV64-NEXT:    li a3, 353
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_871
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_365
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_871: # %cond.load1413
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 355
+; CHECK-INDEXED-RV64-NEXT:    li a3, 354
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_872
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_366
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_872: # %cond.load1417
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 356
+; CHECK-INDEXED-RV64-NEXT:    li a3, 355
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_873
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_367
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_873: # %cond.load1421
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 357
+; CHECK-INDEXED-RV64-NEXT:    li a3, 356
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_874
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_368
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_874: # %cond.load1425
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 358
+; CHECK-INDEXED-RV64-NEXT:    li a3, 357
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_875
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_369
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_875: # %cond.load1429
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 359
+; CHECK-INDEXED-RV64-NEXT:    li a3, 358
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_876
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_370
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_876: # %cond.load1433
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 360
+; CHECK-INDEXED-RV64-NEXT:    li a3, 359
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_877
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_371
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_877: # %cond.load1437
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 361
+; CHECK-INDEXED-RV64-NEXT:    li a3, 360
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_878
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_372
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_878: # %cond.load1441
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 362
+; CHECK-INDEXED-RV64-NEXT:    li a3, 361
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_879
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_373
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_879: # %cond.load1445
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 363
+; CHECK-INDEXED-RV64-NEXT:    li a3, 362
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_880
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_374
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_880: # %cond.load1449
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 364
+; CHECK-INDEXED-RV64-NEXT:    li a3, 363
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_881
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_375
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_881: # %cond.load1453
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 365
+; CHECK-INDEXED-RV64-NEXT:    li a3, 364
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_882
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_376
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_882: # %cond.load1457
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 366
+; CHECK-INDEXED-RV64-NEXT:    li a3, 365
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_883
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_377
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_883: # %cond.load1461
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 367
+; CHECK-INDEXED-RV64-NEXT:    li a3, 366
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_884
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_378
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_884: # %cond.load1465
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 368
+; CHECK-INDEXED-RV64-NEXT:    li a3, 367
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_885
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_379
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_885: # %cond.load1469
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 369
+; CHECK-INDEXED-RV64-NEXT:    li a3, 368
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_886
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_380
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_886: # %cond.load1473
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 370
+; CHECK-INDEXED-RV64-NEXT:    li a3, 369
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_887
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_381
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_887: # %cond.load1477
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 371
+; CHECK-INDEXED-RV64-NEXT:    li a3, 370
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_888
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_382
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_888: # %cond.load1481
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 372
+; CHECK-INDEXED-RV64-NEXT:    li a3, 371
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_889
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_383
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_889: # %cond.load1485
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 373
+; CHECK-INDEXED-RV64-NEXT:    li a3, 372
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_890
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_384
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_890: # %cond.load1489
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 374
+; CHECK-INDEXED-RV64-NEXT:    li a3, 373
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_891
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_385
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_891: # %cond.load1493
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 375
+; CHECK-INDEXED-RV64-NEXT:    li a3, 374
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_892
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_386
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_892: # %cond.load1497
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 376
+; CHECK-INDEXED-RV64-NEXT:    li a3, 375
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_893
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_387
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_893: # %cond.load1501
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 377
+; CHECK-INDEXED-RV64-NEXT:    li a3, 376
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_894
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_388
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_894: # %cond.load1505
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 378
+; CHECK-INDEXED-RV64-NEXT:    li a3, 377
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_895
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_389
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_895: # %cond.load1509
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 379
+; CHECK-INDEXED-RV64-NEXT:    li a3, 378
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_896
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_390
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_896: # %cond.load1513
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 380
+; CHECK-INDEXED-RV64-NEXT:    li a3, 379
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_897
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_391
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_897: # %cond.load1517
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 381
+; CHECK-INDEXED-RV64-NEXT:    li a3, 380
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1030
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_392
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1030: # %cond.load1517
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_393
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_898: # %cond.load1529
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 384
+; CHECK-INDEXED-RV64-NEXT:    li a3, 383
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_899
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_397
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_899: # %cond.load1533
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 385
+; CHECK-INDEXED-RV64-NEXT:    li a3, 384
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_900
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_398
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_900: # %cond.load1537
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 386
+; CHECK-INDEXED-RV64-NEXT:    li a3, 385
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_901
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_399
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_901: # %cond.load1541
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 387
+; CHECK-INDEXED-RV64-NEXT:    li a3, 386
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_902
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_400
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_902: # %cond.load1545
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 388
+; CHECK-INDEXED-RV64-NEXT:    li a3, 387
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_903
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_401
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_903: # %cond.load1549
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 389
+; CHECK-INDEXED-RV64-NEXT:    li a3, 388
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_904
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_402
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_904: # %cond.load1553
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 390
+; CHECK-INDEXED-RV64-NEXT:    li a3, 389
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_905
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_403
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_905: # %cond.load1557
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 391
+; CHECK-INDEXED-RV64-NEXT:    li a3, 390
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_906
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_404
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_906: # %cond.load1561
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 392
+; CHECK-INDEXED-RV64-NEXT:    li a3, 391
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_907
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_405
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_907: # %cond.load1565
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 393
+; CHECK-INDEXED-RV64-NEXT:    li a3, 392
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_908
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_406
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_908: # %cond.load1569
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 394
+; CHECK-INDEXED-RV64-NEXT:    li a3, 393
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_909
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_407
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_909: # %cond.load1573
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 395
+; CHECK-INDEXED-RV64-NEXT:    li a3, 394
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_910
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_408
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_910: # %cond.load1577
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 396
+; CHECK-INDEXED-RV64-NEXT:    li a3, 395
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_911
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_409
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_911: # %cond.load1581
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 397
+; CHECK-INDEXED-RV64-NEXT:    li a3, 396
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_912
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_410
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_912: # %cond.load1585
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 398
+; CHECK-INDEXED-RV64-NEXT:    li a3, 397
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_913
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_411
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_913: # %cond.load1589
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 399
+; CHECK-INDEXED-RV64-NEXT:    li a3, 398
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_914
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_412
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_914: # %cond.load1593
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 400
+; CHECK-INDEXED-RV64-NEXT:    li a3, 399
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_915
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_413
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_915: # %cond.load1597
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 401
+; CHECK-INDEXED-RV64-NEXT:    li a3, 400
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_916
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_414
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_916: # %cond.load1601
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 402
+; CHECK-INDEXED-RV64-NEXT:    li a3, 401
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_917
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_415
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_917: # %cond.load1605
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 403
+; CHECK-INDEXED-RV64-NEXT:    li a3, 402
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_918
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_416
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_918: # %cond.load1609
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 404
+; CHECK-INDEXED-RV64-NEXT:    li a3, 403
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_919
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_417
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_919: # %cond.load1613
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 405
+; CHECK-INDEXED-RV64-NEXT:    li a3, 404
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_920
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_418
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_920: # %cond.load1617
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 406
+; CHECK-INDEXED-RV64-NEXT:    li a3, 405
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_921
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_419
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_921: # %cond.load1621
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 407
+; CHECK-INDEXED-RV64-NEXT:    li a3, 406
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_922
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_420
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_922: # %cond.load1625
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 408
+; CHECK-INDEXED-RV64-NEXT:    li a3, 407
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_923
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_421
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_923: # %cond.load1629
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 409
+; CHECK-INDEXED-RV64-NEXT:    li a3, 408
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_924
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_422
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_924: # %cond.load1633
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 410
+; CHECK-INDEXED-RV64-NEXT:    li a3, 409
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_925
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_423
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_925: # %cond.load1637
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 411
+; CHECK-INDEXED-RV64-NEXT:    li a3, 410
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_926
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_424
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_926: # %cond.load1641
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 412
+; CHECK-INDEXED-RV64-NEXT:    li a3, 411
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_927
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_425
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_927: # %cond.load1645
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 413
+; CHECK-INDEXED-RV64-NEXT:    li a3, 412
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_928
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_426
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_928: # %cond.load1649
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 414
+; CHECK-INDEXED-RV64-NEXT:    li a3, 413
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_929
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_427
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_929: # %cond.load1653
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 415
+; CHECK-INDEXED-RV64-NEXT:    li a3, 414
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_930
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_428
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_930: # %cond.load1657
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 416
+; CHECK-INDEXED-RV64-NEXT:    li a3, 415
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_931
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_429
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_931: # %cond.load1661
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 417
+; CHECK-INDEXED-RV64-NEXT:    li a3, 416
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_932
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_430
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_932: # %cond.load1665
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 418
+; CHECK-INDEXED-RV64-NEXT:    li a3, 417
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_933
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_431
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_933: # %cond.load1669
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 419
+; CHECK-INDEXED-RV64-NEXT:    li a3, 418
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_934
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_432
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_934: # %cond.load1673
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 420
+; CHECK-INDEXED-RV64-NEXT:    li a3, 419
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_935
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_433
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_935: # %cond.load1677
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 421
+; CHECK-INDEXED-RV64-NEXT:    li a3, 420
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_936
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_434
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_936: # %cond.load1681
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 422
+; CHECK-INDEXED-RV64-NEXT:    li a3, 421
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_937
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_435
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_937: # %cond.load1685
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 423
+; CHECK-INDEXED-RV64-NEXT:    li a3, 422
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_938
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_436
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_938: # %cond.load1689
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 424
+; CHECK-INDEXED-RV64-NEXT:    li a3, 423
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_939
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_437
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_939: # %cond.load1693
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 425
+; CHECK-INDEXED-RV64-NEXT:    li a3, 424
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_940
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_438
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_940: # %cond.load1697
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 426
+; CHECK-INDEXED-RV64-NEXT:    li a3, 425
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_941
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_439
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_941: # %cond.load1701
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 427
+; CHECK-INDEXED-RV64-NEXT:    li a3, 426
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_942
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_440
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_942: # %cond.load1705
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 428
+; CHECK-INDEXED-RV64-NEXT:    li a3, 427
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_943
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_441
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_943: # %cond.load1709
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 429
+; CHECK-INDEXED-RV64-NEXT:    li a3, 428
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_944
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_442
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_944: # %cond.load1713
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 430
+; CHECK-INDEXED-RV64-NEXT:    li a3, 429
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_945
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_443
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_945: # %cond.load1717
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 431
+; CHECK-INDEXED-RV64-NEXT:    li a3, 430
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_946
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_444
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_946: # %cond.load1721
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 432
+; CHECK-INDEXED-RV64-NEXT:    li a3, 431
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_947
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_445
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_947: # %cond.load1725
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 433
+; CHECK-INDEXED-RV64-NEXT:    li a3, 432
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_948
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_446
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_948: # %cond.load1729
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 434
+; CHECK-INDEXED-RV64-NEXT:    li a3, 433
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_949
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_447
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_949: # %cond.load1733
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 435
+; CHECK-INDEXED-RV64-NEXT:    li a3, 434
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_950
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_448
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_950: # %cond.load1737
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 436
+; CHECK-INDEXED-RV64-NEXT:    li a3, 435
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_951
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_449
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_951: # %cond.load1741
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 437
+; CHECK-INDEXED-RV64-NEXT:    li a3, 436
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_952
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_450
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_952: # %cond.load1745
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 438
+; CHECK-INDEXED-RV64-NEXT:    li a3, 437
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_953
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_451
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_953: # %cond.load1749
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 439
+; CHECK-INDEXED-RV64-NEXT:    li a3, 438
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_954
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_452
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_954: # %cond.load1753
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 440
+; CHECK-INDEXED-RV64-NEXT:    li a3, 439
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_955
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_453
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_955: # %cond.load1757
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 441
+; CHECK-INDEXED-RV64-NEXT:    li a3, 440
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_956
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_454
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_956: # %cond.load1761
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 442
+; CHECK-INDEXED-RV64-NEXT:    li a3, 441
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_957
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_455
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_957: # %cond.load1765
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 443
+; CHECK-INDEXED-RV64-NEXT:    li a3, 442
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_958
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_456
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_958: # %cond.load1769
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 444
+; CHECK-INDEXED-RV64-NEXT:    li a3, 443
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_959
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_457
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_959: # %cond.load1773
+; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-INDEXED-RV64-NEXT:    li a1, 445
+; CHECK-INDEXED-RV64-NEXT:    li a3, 444
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
+; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1031
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_458
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1031: # %cond.load1773
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_459
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_960: # %cond.load1785
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 448
+; CHECK-INDEXED-RV64-NEXT:    li a3, 447
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_961
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_463
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_961: # %cond.load1789
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 449
+; CHECK-INDEXED-RV64-NEXT:    li a3, 448
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_962
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_464
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_962: # %cond.load1793
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 450
+; CHECK-INDEXED-RV64-NEXT:    li a3, 449
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_963
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_465
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_963: # %cond.load1797
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 451
+; CHECK-INDEXED-RV64-NEXT:    li a3, 450
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_964
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_466
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_964: # %cond.load1801
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 452
+; CHECK-INDEXED-RV64-NEXT:    li a3, 451
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_965
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_467
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_965: # %cond.load1805
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 453
+; CHECK-INDEXED-RV64-NEXT:    li a3, 452
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_966
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_468
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_966: # %cond.load1809
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 454
+; CHECK-INDEXED-RV64-NEXT:    li a3, 453
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_967
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_469
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_967: # %cond.load1813
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 455
+; CHECK-INDEXED-RV64-NEXT:    li a3, 454
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_968
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_470
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_968: # %cond.load1817
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 456
+; CHECK-INDEXED-RV64-NEXT:    li a3, 455
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_969
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_471
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_969: # %cond.load1821
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 457
+; CHECK-INDEXED-RV64-NEXT:    li a3, 456
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_970
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_472
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_970: # %cond.load1825
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 458
+; CHECK-INDEXED-RV64-NEXT:    li a3, 457
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_971
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_473
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_971: # %cond.load1829
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 459
+; CHECK-INDEXED-RV64-NEXT:    li a3, 458
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_972
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_474
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_972: # %cond.load1833
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 460
+; CHECK-INDEXED-RV64-NEXT:    li a3, 459
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_973
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_475
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_973: # %cond.load1837
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 461
+; CHECK-INDEXED-RV64-NEXT:    li a3, 460
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_974
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_476
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_974: # %cond.load1841
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 462
+; CHECK-INDEXED-RV64-NEXT:    li a3, 461
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_975
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_477
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_975: # %cond.load1845
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 463
+; CHECK-INDEXED-RV64-NEXT:    li a3, 462
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_976
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_478
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_976: # %cond.load1849
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 464
+; CHECK-INDEXED-RV64-NEXT:    li a3, 463
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_977
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_479
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_977: # %cond.load1853
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 465
+; CHECK-INDEXED-RV64-NEXT:    li a3, 464
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_978
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_480
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_978: # %cond.load1857
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 466
+; CHECK-INDEXED-RV64-NEXT:    li a3, 465
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_979
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_481
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_979: # %cond.load1861
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 467
+; CHECK-INDEXED-RV64-NEXT:    li a3, 466
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_980
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_482
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_980: # %cond.load1865
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 468
+; CHECK-INDEXED-RV64-NEXT:    li a3, 467
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_981
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_483
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_981: # %cond.load1869
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 469
+; CHECK-INDEXED-RV64-NEXT:    li a3, 468
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_982
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_484
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_982: # %cond.load1873
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 470
+; CHECK-INDEXED-RV64-NEXT:    li a3, 469
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_983
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_485
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_983: # %cond.load1877
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 471
+; CHECK-INDEXED-RV64-NEXT:    li a3, 470
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_984
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_486
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_984: # %cond.load1881
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 472
+; CHECK-INDEXED-RV64-NEXT:    li a3, 471
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_985
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_487
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_985: # %cond.load1885
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 473
+; CHECK-INDEXED-RV64-NEXT:    li a3, 472
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_986
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_488
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_986: # %cond.load1889
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 474
+; CHECK-INDEXED-RV64-NEXT:    li a3, 473
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_987
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_489
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_987: # %cond.load1893
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 475
+; CHECK-INDEXED-RV64-NEXT:    li a3, 474
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_988
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_490
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_988: # %cond.load1897
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 476
+; CHECK-INDEXED-RV64-NEXT:    li a3, 475
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_989
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_491
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_989: # %cond.load1901
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 477
+; CHECK-INDEXED-RV64-NEXT:    li a3, 476
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_990
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_492
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_990: # %cond.load1905
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 478
+; CHECK-INDEXED-RV64-NEXT:    li a3, 477
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_991
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_493
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_991: # %cond.load1909
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 479
+; CHECK-INDEXED-RV64-NEXT:    li a3, 478
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_992
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_494
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 480
+; CHECK-INDEXED-RV64-NEXT:    li a3, 479
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_993
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_495
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 481
+; CHECK-INDEXED-RV64-NEXT:    li a3, 480
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_994
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_496
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 482
+; CHECK-INDEXED-RV64-NEXT:    li a3, 481
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_995
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_497
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 483
+; CHECK-INDEXED-RV64-NEXT:    li a3, 482
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_996
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_498
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 484
+; CHECK-INDEXED-RV64-NEXT:    li a3, 483
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_997
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_499
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 485
+; CHECK-INDEXED-RV64-NEXT:    li a3, 484
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_998
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_500
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 486
+; CHECK-INDEXED-RV64-NEXT:    li a3, 485
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_999
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_501
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 487
+; CHECK-INDEXED-RV64-NEXT:    li a3, 486
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1000
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_502
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 488
+; CHECK-INDEXED-RV64-NEXT:    li a3, 487
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1001
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_503
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 489
+; CHECK-INDEXED-RV64-NEXT:    li a3, 488
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1002
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_504
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 490
+; CHECK-INDEXED-RV64-NEXT:    li a3, 489
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1003
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_505
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 491
+; CHECK-INDEXED-RV64-NEXT:    li a3, 490
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1004
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_506
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 492
+; CHECK-INDEXED-RV64-NEXT:    li a3, 491
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1005
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_507
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 493
+; CHECK-INDEXED-RV64-NEXT:    li a3, 492
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1006
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_508
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 494
+; CHECK-INDEXED-RV64-NEXT:    li a3, 493
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1007
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_509
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 495
+; CHECK-INDEXED-RV64-NEXT:    li a3, 494
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1008
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_510
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 496
+; CHECK-INDEXED-RV64-NEXT:    li a3, 495
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1009
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_511
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 497
+; CHECK-INDEXED-RV64-NEXT:    li a3, 496
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1010
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_512
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 498
+; CHECK-INDEXED-RV64-NEXT:    li a3, 497
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1011
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_513
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 499
+; CHECK-INDEXED-RV64-NEXT:    li a3, 498
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1012
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_514
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 500
+; CHECK-INDEXED-RV64-NEXT:    li a3, 499
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1013
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_515
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 501
+; CHECK-INDEXED-RV64-NEXT:    li a3, 500
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1014
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_516
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 502
+; CHECK-INDEXED-RV64-NEXT:    li a3, 501
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1015
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_517
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 503
+; CHECK-INDEXED-RV64-NEXT:    li a3, 502
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1016
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_518
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 504
+; CHECK-INDEXED-RV64-NEXT:    li a3, 503
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1017
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_519
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 505
+; CHECK-INDEXED-RV64-NEXT:    li a3, 504
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1018
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_520
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 506
+; CHECK-INDEXED-RV64-NEXT:    li a3, 505
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1019
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_521
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 507
+; CHECK-INDEXED-RV64-NEXT:    li a3, 506
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1020
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_522
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 508
+; CHECK-INDEXED-RV64-NEXT:    li a3, 507
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1021
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_523
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 509
+; CHECK-INDEXED-RV64-NEXT:    li a3, 508
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1022
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_524
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 510
+; CHECK-INDEXED-RV64-NEXT:    li a3, 509
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
+; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1023
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_525
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a3, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-INDEXED-RV64-NEXT:    li a2, 511
+; CHECK-INDEXED-RV64-NEXT:    li a3, 510
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
+; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_1024
+; CHECK-INDEXED-RV64-NEXT:    j .LBB61_526
+; CHECK-INDEXED-RV64-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-INDEXED-RV64-NEXT:    lbu a0, 0(a0)
+; CHECK-INDEXED-RV64-NEXT:    li a1, 512
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a0
+; CHECK-INDEXED-RV64-NEXT:    li a0, 511
+; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
index a83036b2f9342f..c538a6de7bef3b 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
@@ -1,279 +1,348 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
-; RUN: llc -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=RV32
-; RUN: llc -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck %s --check-prefixes=RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV64
 
 declare <1 x half> @llvm.masked.expandload.v1f16(ptr, <1 x i1>, <1 x half>)
 define <1 x half> @expandload_v1f16(ptr %base, <1 x half> %src0, <1 x i1> %mask) {
-; RV32-LABEL: expandload_v1f16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1f16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v1f16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v1f16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x half> @llvm.masked.expandload.v1f16(ptr align 2 %base, <1 x i1> %mask, <1 x half> %src0)
   ret <1 x half>%res
 }
 
 declare <2 x half> @llvm.masked.expandload.v2f16(ptr, <2 x i1>, <2 x half>)
 define <2 x half> @expandload_v2f16(ptr %base, <2 x half> %src0, <2 x i1> %mask) {
-; RV32-LABEL: expandload_v2f16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2f16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v2f16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v2f16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x half> @llvm.masked.expandload.v2f16(ptr align 2 %base, <2 x i1> %mask, <2 x half> %src0)
   ret <2 x half>%res
 }
 
 declare <4 x half> @llvm.masked.expandload.v4f16(ptr, <4 x i1>, <4 x half>)
 define <4 x half> @expandload_v4f16(ptr %base, <4 x half> %src0, <4 x i1> %mask) {
-; RV32-LABEL: expandload_v4f16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4f16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v4f16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v4f16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x half> @llvm.masked.expandload.v4f16(ptr align 2 %base, <4 x i1> %mask, <4 x half> %src0)
   ret <4 x half>%res
 }
 
 declare <8 x half> @llvm.masked.expandload.v8f16(ptr, <8 x i1>, <8 x half>)
 define <8 x half> @expandload_v8f16(ptr %base, <8 x half> %src0, <8 x i1> %mask) {
-; RV32-LABEL: expandload_v8f16:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; RV32-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8f16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v8f16:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 1, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; RV64-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v8f16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x half> @llvm.masked.expandload.v8f16(ptr align 2 %base, <8 x i1> %mask, <8 x half> %src0)
   ret <8 x half>%res
 }
 
 declare <1 x float> @llvm.masked.expandload.v1f32(ptr, <1 x i1>, <1 x float>)
 define <1 x float> @expandload_v1f32(ptr %base, <1 x float> %src0, <1 x i1> %mask) {
-; RV32-LABEL: expandload_v1f32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1f32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v1f32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v1f32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x float> @llvm.masked.expandload.v1f32(ptr align 4 %base, <1 x i1> %mask, <1 x float> %src0)
   ret <1 x float>%res
 }
 
 declare <2 x float> @llvm.masked.expandload.v2f32(ptr, <2 x i1>, <2 x float>)
 define <2 x float> @expandload_v2f32(ptr %base, <2 x float> %src0, <2 x i1> %mask) {
-; RV32-LABEL: expandload_v2f32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2f32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v2f32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v2f32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x float> @llvm.masked.expandload.v2f32(ptr align 4 %base, <2 x i1> %mask, <2 x float> %src0)
   ret <2 x float>%res
 }
 
 declare <4 x float> @llvm.masked.expandload.v4f32(ptr, <4 x i1>, <4 x float>)
 define <4 x float> @expandload_v4f32(ptr %base, <4 x float> %src0, <4 x i1> %mask) {
-; RV32-LABEL: expandload_v4f32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4f32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v4f32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v4f32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x float> @llvm.masked.expandload.v4f32(ptr align 4 %base, <4 x i1> %mask, <4 x float> %src0)
   ret <4 x float>%res
 }
 
 declare <8 x float> @llvm.masked.expandload.v8f32(ptr, <8 x i1>, <8 x float>)
 define <8 x float> @expandload_v8f32(ptr %base, <8 x float> %src0, <8 x i1> %mask) {
-; RV32-LABEL: expandload_v8f32:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 2, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8f32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v8f32:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 2, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; RV64-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-LABEL: expandload_v8f32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x float> @llvm.masked.expandload.v8f32(ptr align 4 %base, <8 x i1> %mask, <8 x float> %src0)
   ret <8 x float>%res
 }
 
 declare <1 x double> @llvm.masked.expandload.v1f64(ptr, <1 x i1>, <1 x double>)
 define <1 x double> @expandload_v1f64(ptr %base, <1 x double> %src0, <1 x i1> %mask) {
-; RV32-LABEL: expandload_v1f64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1f64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v1f64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: expandload_v1f64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: expandload_v1f64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <1 x double> @llvm.masked.expandload.v1f64(ptr align 8 %base, <1 x i1> %mask, <1 x double> %src0)
   ret <1 x double>%res
 }
 
 declare <2 x double> @llvm.masked.expandload.v2f64(ptr, <2 x i1>, <2 x double>)
 define <2 x double> @expandload_v2f64(ptr %base, <2 x double> %src0, <2 x i1> %mask) {
-; RV32-LABEL: expandload_v2f64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2f64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: expandload_v2f64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v2f64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: expandload_v2f64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <2 x double> @llvm.masked.expandload.v2f64(ptr align 8 %base, <2 x i1> %mask, <2 x double> %src0)
   ret <2 x double>%res
 }
 
 declare <4 x double> @llvm.masked.expandload.v4f64(ptr, <4 x i1>, <4 x double>)
 define <4 x double> @expandload_v4f64(ptr %base, <4 x double> %src0, <4 x i1> %mask) {
-; RV32-LABEL: expandload_v4f64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4f64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v4f64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: expandload_v4f64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: expandload_v4f64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <4 x double> @llvm.masked.expandload.v4f64(ptr align 8 %base, <4 x i1> %mask, <4 x double> %src0)
   ret <4 x double>%res
 }
 
 declare <8 x double> @llvm.masked.expandload.v8f64(ptr, <8 x i1>, <8 x double>)
 define <8 x double> @expandload_v8f64(ptr %base, <8 x double> %src0, <8 x i1> %mask) {
-; RV32-LABEL: expandload_v8f64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8f64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: expandload_v8f64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v8f64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: expandload_v8f64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <8 x double> @llvm.masked.expandload.v8f64(ptr align 8 %base, <8 x i1> %mask, <8 x double> %src0)
   ret <8 x double>%res
 }
+;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
+; CHECK-VRGATHER-RV32: {{.*}}
+; CHECK-VRGATHER-RV64: {{.*}}
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
index 64ae0137beb85c..0fd990fc71344c 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
@@ -1,265 +1,440 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
-; RUN: llc -mtriple=riscv32 -mattr=+m,+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV32
-; RUN: llc -mtriple=riscv64 -mattr=+m,+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV64
+; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV32
+; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+optimized-indexed-load-store %s -o - \
+; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV64
 
 declare <1 x i8> @llvm.masked.expandload.v1i8(ptr, <1 x i1>, <1 x i8>)
 define <1 x i8> @expandload_v1i8(ptr %base, <1 x i8> %src0, <1 x i1> %mask) {
-; CHECK-LABEL: expandload_v1i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v1i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %src0)
   ret <1 x i8>%res
 }
 
 declare <2 x i8> @llvm.masked.expandload.v2i8(ptr, <2 x i1>, <2 x i8>)
 define <2 x i8> @expandload_v2i8(ptr %base, <2 x i8> %src0, <2 x i1> %mask) {
-; CHECK-LABEL: expandload_v2i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v2i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %src0)
   ret <2 x i8>%res
 }
 
 declare <4 x i8> @llvm.masked.expandload.v4i8(ptr, <4 x i1>, <4 x i8>)
 define <4 x i8> @expandload_v4i8(ptr %base, <4 x i8> %src0, <4 x i1> %mask) {
-; CHECK-LABEL: expandload_v4i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v4i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %src0)
   ret <4 x i8>%res
 }
 
 declare <8 x i8> @llvm.masked.expandload.v8i8(ptr, <8 x i1>, <8 x i8>)
 define <8 x i8> @expandload_v8i8(ptr %base, <8 x i8> %src0, <8 x i1> %mask) {
-; CHECK-LABEL: expandload_v8i8:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8i8:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v8i8:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %src0)
   ret <8 x i8>%res
 }
 
 declare <1 x i16> @llvm.masked.expandload.v1i16(ptr, <1 x i1>, <1 x i16>)
 define <1 x i16> @expandload_v1i16(ptr %base, <1 x i16> %src0, <1 x i1> %mask) {
-; CHECK-LABEL: expandload_v1i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v1i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %src0)
   ret <1 x i16>%res
 }
 
 declare <2 x i16> @llvm.masked.expandload.v2i16(ptr, <2 x i1>, <2 x i16>)
 define <2 x i16> @expandload_v2i16(ptr %base, <2 x i16> %src0, <2 x i1> %mask) {
-; CHECK-LABEL: expandload_v2i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v2i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %src0)
   ret <2 x i16>%res
 }
 
 declare <4 x i16> @llvm.masked.expandload.v4i16(ptr, <4 x i1>, <4 x i16>)
 define <4 x i16> @expandload_v4i16(ptr %base, <4 x i16> %src0, <4 x i1> %mask) {
-; CHECK-LABEL: expandload_v4i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v4i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %src0)
   ret <4 x i16>%res
 }
 
 declare <8 x i16> @llvm.masked.expandload.v8i16(ptr, <8 x i1>, <8 x i16>)
 define <8 x i16> @expandload_v8i16(ptr %base, <8 x i16> %src0, <8 x i1> %mask) {
-; CHECK-LABEL: expandload_v8i16:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; CHECK-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8i16:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v8i16:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %src0)
   ret <8 x i16>%res
 }
 
 declare <1 x i32> @llvm.masked.expandload.v1i32(ptr, <1 x i1>, <1 x i32>)
 define <1 x i32> @expandload_v1i32(ptr %base, <1 x i32> %src0, <1 x i1> %mask) {
-; CHECK-LABEL: expandload_v1i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v1i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %src0)
   ret <1 x i32>%res
 }
 
 declare <2 x i32> @llvm.masked.expandload.v2i32(ptr, <2 x i1>, <2 x i32>)
 define <2 x i32> @expandload_v2i32(ptr %base, <2 x i32> %src0, <2 x i1> %mask) {
-; CHECK-LABEL: expandload_v2i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v2i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %src0)
   ret <2 x i32>%res
 }
 
 declare <4 x i32> @llvm.masked.expandload.v4i32(ptr, <4 x i1>, <4 x i32>)
 define <4 x i32> @expandload_v4i32(ptr %base, <4 x i32> %src0, <4 x i1> %mask) {
-; CHECK-LABEL: expandload_v4i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-NEXT:    viota.m v9, v0
-; CHECK-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v4i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %src0)
   ret <4 x i32>%res
 }
 
 declare <8 x i32> @llvm.masked.expandload.v8i32(ptr, <8 x i1>, <8 x i32>)
 define <8 x i32> @expandload_v8i32(ptr %base, <8 x i32> %src0, <8 x i1> %mask) {
-; CHECK-LABEL: expandload_v8i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-NEXT:    viota.m v10, v0
-; CHECK-NEXT:    vsll.vi v10, v10, 2, v0.t
-; CHECK-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; CHECK-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8i32:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-LABEL: expandload_v8i32:
+; CHECK-INDEXED:       # %bb.0:
+; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
+; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
+; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %src0)
   ret <8 x i32>%res
 }
 
 declare <1 x i64> @llvm.masked.expandload.v1i64(ptr, <1 x i1>, <1 x i64>)
 define <1 x i64> @expandload_v1i64(ptr %base, <1 x i64> %src0, <1 x i1> %mask) {
-; RV32-LABEL: expandload_v1i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetvli a1, zero, e8, mf8, ta, ma
-; RV32-NEXT:    vfirst.m a1, v0
-; RV32-NEXT:    bnez a1, .LBB12_2
-; RV32-NEXT:  # %bb.1: # %cond.load
-; RV32-NEXT:    addi sp, sp, -16
-; RV32-NEXT:    .cfi_def_cfa_offset 16
-; RV32-NEXT:    lw a1, 0(a0)
-; RV32-NEXT:    lw a0, 4(a0)
-; RV32-NEXT:    sw a1, 8(sp)
-; RV32-NEXT:    sw a0, 12(sp)
-; RV32-NEXT:    addi a0, sp, 8
-; RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV32-NEXT:    vlse64.v v8, (a0), zero
-; RV32-NEXT:    addi sp, sp, 16
-; RV32-NEXT:  .LBB12_2: # %else
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v1i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v1i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: expandload_v1i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: expandload_v1i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %src0)
   ret <1 x i64>%res
 }
 
 declare <2 x i64> @llvm.masked.expandload.v2i64(ptr, <2 x i1>, <2 x i64>)
 define <2 x i64> @expandload_v2i64(ptr %base, <2 x i64> %src0, <2 x i1> %mask) {
-; RV32-LABEL: expandload_v2i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; RV32-NEXT:    viota.m v9, v0
-; RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v2i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v10, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: expandload_v2i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v2i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; RV64-NEXT:    viota.m v9, v0
-; RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: expandload_v2i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %src0)
   ret <2 x i64>%res
 }
 
 declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
 define <4 x i64> @expandload_v4i64(ptr %base, <4 x i64> %src0, <4 x i1> %mask) {
-; RV32-LABEL: expandload_v4i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; RV32-NEXT:    viota.m v10, v0
-; RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v4i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v12, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-VRGATHER-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v4i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; RV64-NEXT:    viota.m v10, v0
-; RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV32-LABEL: expandload_v4i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
+;
+; CHECK-INDEXED-RV64-LABEL: expandload_v4i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %src0)
   ret <4 x i64>%res
 }
 
 declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
 define <8 x i64> @expandload_v8i64(ptr %base, <8 x i64> %src0, <8 x i1> %mask) {
-; RV32-LABEL: expandload_v8i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; RV32-NEXT:    viota.m v12, v0
-; RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; RV32-NEXT:    ret
+; CHECK-VRGATHER-LABEL: expandload_v8i64:
+; CHECK-VRGATHER:       # %bb.0:
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
+; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
+; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-VRGATHER-NEXT:    viota.m v16, v0
+; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-VRGATHER-NEXT:    ret
+;
+; CHECK-INDEXED-RV32-LABEL: expandload_v8i64:
+; CHECK-INDEXED-RV32:       # %bb.0:
+; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
+; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV32-NEXT:    ret
 ;
-; RV64-LABEL: expandload_v8i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; RV64-NEXT:    viota.m v12, v0
-; RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; RV64-NEXT:    ret
+; CHECK-INDEXED-RV64-LABEL: expandload_v8i64:
+; CHECK-INDEXED-RV64:       # %bb.0:
+; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
+; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
+; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
+; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
+; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
+; CHECK-INDEXED-RV64-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %src0)
   ret <8 x i64>%res
 }
+;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
+; CHECK-VRGATHER-RV32: {{.*}}
+; CHECK-VRGATHER-RV64: {{.*}}

>From 81473aba0d0ebc521676b65c24f3f10053202626 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Tue, 29 Oct 2024 15:52:19 +0800
Subject: [PATCH 13/15] Add a comment to describe TuneOptimizedIndexedLoadStore

---
 llvm/lib/Target/RISCV/RISCVFeatures.td | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/llvm/lib/Target/RISCV/RISCVFeatures.td b/llvm/lib/Target/RISCV/RISCVFeatures.td
index 88bad0e7920a6d..6b95b529a3c85d 100644
--- a/llvm/lib/Target/RISCV/RISCVFeatures.td
+++ b/llvm/lib/Target/RISCV/RISCVFeatures.td
@@ -1359,6 +1359,13 @@ def TuneOptimizedZeroStrideLoad
                       "true", "Optimized (perform fewer memory operations)"
                       "zero-stride vector load">;
 
+// There are two ways to synthesize expandload:
+// 1. Using vrgather.vv: vcpop.m + vleN.v + viota.m + vrgather.vv
+// 2. Using indexed load: viota.m + vsll.vi(optional) + vluxeiN.v
+//
+// If the target has an optimized implementation of vector indexed load and has
+// a better performance when using the indexed load way instead of the vrgather.vv
+// way, the target should set this feature.
 def TuneOptimizedIndexedLoadStore
    : SubtargetFeature<"optimized-indexed-load-store", "HasOptimizedIndexedLoadStore",
                       "true", "Optimized vector indexed load/store">;

>From 2d51286bd2abb4e940ddb92d88a4cec26b931aa5 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 30 Oct 2024 16:04:46 +0800
Subject: [PATCH 14/15] Revert to use vrgather.vv lowering

---
 llvm/lib/Target/RISCV/RISCVFeatures.td        |    11 -
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp   |   127 +-
 llvm/test/CodeGen/RISCV/rvv/expandload.ll     | 58685 ++++++----------
 .../RISCV/rvv/fixed-vectors-expandload-fp.ll  |   396 +-
 .../RISCV/rvv/fixed-vectors-expandload-int.ll |   504 +-
 5 files changed, 20022 insertions(+), 39701 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVFeatures.td b/llvm/lib/Target/RISCV/RISCVFeatures.td
index 6b95b529a3c85d..3d0e1dae801d39 100644
--- a/llvm/lib/Target/RISCV/RISCVFeatures.td
+++ b/llvm/lib/Target/RISCV/RISCVFeatures.td
@@ -1359,17 +1359,6 @@ def TuneOptimizedZeroStrideLoad
                       "true", "Optimized (perform fewer memory operations)"
                       "zero-stride vector load">;
 
-// There are two ways to synthesize expandload:
-// 1. Using vrgather.vv: vcpop.m + vleN.v + viota.m + vrgather.vv
-// 2. Using indexed load: viota.m + vsll.vi(optional) + vluxeiN.v
-//
-// If the target has an optimized implementation of vector indexed load and has
-// a better performance when using the indexed load way instead of the vrgather.vv
-// way, the target should set this feature.
-def TuneOptimizedIndexedLoadStore
-   : SubtargetFeature<"optimized-indexed-load-store", "HasOptimizedIndexedLoadStore",
-                      "true", "Optimized vector indexed load/store">;
-
 def Experimental
    : SubtargetFeature<"experimental", "HasExperimental",
                       "true", "Experimental intrinsics">;
diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 4f70b7e9b49653..7c16798df638b9 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11136,17 +11136,41 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
   if (!VL)
     VL = getDefaultVLOps(VT, ContainerVT, DL, DAG, Subtarget).second;
 
-  SDValue Result;
-  if (!IsUnmasked && IsExpandingLoad &&
-      Subtarget.hasOptimizedIndexedLoadStore()) {
+  SDValue ExpandingVL;
+  if (!IsUnmasked && IsExpandingLoad) {
+    ExpandingVL = VL;
+    VL =
+        DAG.getNode(RISCVISD::VCPOP_VL, DL, XLenVT, Mask,
+                    getAllOnesMask(Mask.getSimpleValueType(), VL, DL, DAG), VL);
+  }
+
+  unsigned IntID = IsUnmasked || IsExpandingLoad
+                       ? Intrinsic::riscv_vle
+                       : Intrinsic::riscv_vle_mask;
+  SmallVector<SDValue, 8> Ops{Chain, DAG.getTargetConstant(IntID, DL, XLenVT)};
+  if (IntID == Intrinsic::riscv_vle)
+    Ops.push_back(DAG.getUNDEF(ContainerVT));
+  else
+    Ops.push_back(PassThru);
+  Ops.push_back(BasePtr);
+  if (IntID == Intrinsic::riscv_vle_mask)
+    Ops.push_back(Mask);
+  Ops.push_back(VL);
+  if (IntID == Intrinsic::riscv_vle_mask)
+    Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
+
+  SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
+
+  SDValue Result =
+      DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops, MemVT, MMO);
+  Chain = Result.getValue(1);
+  if (ExpandingVL) {
     MVT IndexVT = ContainerVT;
     if (ContainerVT.isFloatingPoint())
-      IndexVT = IndexVT.changeVectorElementTypeToInteger();
+      IndexVT = ContainerVT.changeVectorElementTypeToInteger();
 
     MVT IndexEltVT = IndexVT.getVectorElementType();
-    if (Subtarget.isRV32() && IndexEltVT.bitsGT(XLenVT))
-      IndexVT = IndexVT.changeVectorElementType(XLenVT);
-
+    bool UseVRGATHEREI16 = false;
     // If index vector is an i8 vector and the element count exceeds 256, we
     // should change the element type of index vector to i16 to avoid
     // overflow.
@@ -11155,90 +11179,17 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
       if (getLMUL(IndexVT) == RISCVII::LMUL_8)
         return SDValue();
       IndexVT = IndexVT.changeVectorElementType(MVT::i16);
+      UseVRGATHEREI16 = true;
     }
 
-    SDValue Index =
+    SDValue Iota =
         DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
-                    DAG.getTargetConstant(Intrinsic::riscv_viota, DL, XLenVT),
-                    DAG.getUNDEF(IndexVT), Mask, VL);
-    if (uint64_t EltSize = ContainerVT.getScalarSizeInBits(); EltSize > 8)
-      Index = DAG.getNode(RISCVISD::SHL_VL, DL, IndexVT, Index,
-                          DAG.getConstant(Log2_64(EltSize / 8), DL, IndexVT),
-                          DAG.getUNDEF(IndexVT), Mask, VL);
-    unsigned IntID = Intrinsic::riscv_vluxei_mask;
-    SmallVector<SDValue, 8> Ops{Chain,
-                                DAG.getTargetConstant(IntID, DL, XLenVT)};
-    Ops.push_back(PassThru);
-    Ops.push_back(BasePtr);
-    Ops.push_back(Index);
-    Ops.push_back(Mask);
-    Ops.push_back(VL);
-    Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
-
-    SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
-
-    Result = DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops,
-                                     MemVT, MMO);
-    Chain = Result.getValue(1);
-  } else {
-    SDValue ExpandingVL;
-    if (!IsUnmasked && IsExpandingLoad &&
-        !Subtarget.hasOptimizedIndexedLoadStore()) {
-      ExpandingVL = VL;
-      VL = DAG.getNode(RISCVISD::VCPOP_VL, DL, XLenVT, Mask,
-                       getAllOnesMask(Mask.getSimpleValueType(), VL, DL, DAG),
-                       VL);
-    }
-
-    unsigned IntID = IsUnmasked || (IsExpandingLoad &&
-                                    !Subtarget.hasOptimizedIndexedLoadStore())
-                         ? Intrinsic::riscv_vle
-                         : Intrinsic::riscv_vle_mask;
-    SmallVector<SDValue, 8> Ops{Chain,
-                                DAG.getTargetConstant(IntID, DL, XLenVT)};
-    if (IntID == Intrinsic::riscv_vle)
-      Ops.push_back(DAG.getUNDEF(ContainerVT));
-    else
-      Ops.push_back(PassThru);
-    Ops.push_back(BasePtr);
-    if (IntID == Intrinsic::riscv_vle_mask)
-      Ops.push_back(Mask);
-    Ops.push_back(VL);
-    if (IntID == Intrinsic::riscv_vle_mask)
-      Ops.push_back(DAG.getTargetConstant(RISCVII::TAIL_AGNOSTIC, DL, XLenVT));
-
-    SDVTList VTs = DAG.getVTList({ContainerVT, MVT::Other});
-
-    Result = DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, VTs, Ops,
-                                     MemVT, MMO);
-    Chain = Result.getValue(1);
-    if (ExpandingVL) {
-      MVT IndexVT = ContainerVT;
-      if (ContainerVT.isFloatingPoint())
-        IndexVT = ContainerVT.changeVectorElementTypeToInteger();
-
-      MVT IndexEltVT = IndexVT.getVectorElementType();
-      bool UseVRGATHEREI16 = false;
-      // If index vector is an i8 vector and the element count exceeds 256, we
-      // should change the element type of index vector to i16 to avoid
-      // overflow.
-      if (IndexEltVT == MVT::i8 && VT.getVectorNumElements() > 256) {
-        // FIXME: We need to do vector splitting manually for LMUL=8 cases.
-        if (getLMUL(IndexVT) == RISCVII::LMUL_8)
-          return SDValue();
-        IndexVT = IndexVT.changeVectorElementType(MVT::i16);
-        UseVRGATHEREI16 = true;
-      }
-
-      SDValue Iota =
-          DAG.getNode(ISD::INTRINSIC_WO_CHAIN, DL, IndexVT,
-                      DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
-                      DAG.getUNDEF(IndexVT), Mask, ExpandingVL);
-      Result = DAG.getNode(UseVRGATHEREI16 ? RISCVISD::VRGATHEREI16_VV_VL
-                                           : RISCVISD::VRGATHER_VV_VL,
-                           DL, ContainerVT, Result, Iota, PassThru, Mask,
-                           ExpandingVL);
-    }
+                    DAG.getConstant(Intrinsic::riscv_viota, DL, XLenVT),
+                    DAG.getUNDEF(IndexVT), Mask, ExpandingVL);
+    Result =
+        DAG.getNode(UseVRGATHEREI16 ? RISCVISD::VRGATHEREI16_VV_VL
+                                    : RISCVISD::VRGATHER_VV_VL,
+                    DL, ContainerVT, Result, Iota, PassThru, Mask, ExpandingVL);
   }
 
   if (VT.isFixedLengthVector())
diff --git a/llvm/test/CodeGen/RISCV/rvv/expandload.ll b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
index b087b48061c8ec..65df96bef2ad1f 100644
--- a/llvm/test/CodeGen/RISCV/rvv/expandload.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/expandload.ll
@@ -1,33 +1,22 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
 ; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32
 ; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-VRGATHER,CHECK-VRGATHER-RV64
-; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+v,+d,+m,+zbb,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-INDEXED,CHECK-INDEXED-RV32
-; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+v,+d,+m,+zbb,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-INDEXED,CHECK-INDEXED-RV64
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64
 
 ; Load + expand for i8 type
 
 define <1 x i8> @test_expandload_v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v1i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v1i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr align 1 %base, <1 x i1> %mask, <1 x i8> %passthru)
   ret <1 x i8> %res
 }
@@ -43,23 +32,16 @@ define <1 x i8> @test_expandload_v1i8_all_ones(ptr %base, <1 x i8> %passthru) {
 }
 
 define <2 x i8> @test_expandload_v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v2i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v2i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr align 1 %base, <2 x i1> %mask, <2 x i8> %passthru)
   ret <2 x i8> %res
 }
@@ -75,23 +57,16 @@ define <2 x i8> @test_expandload_v2i8_all_ones(ptr %base, <2 x i8> %passthru) {
 }
 
 define <4 x i8> @test_expandload_v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v4i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v4i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr align 1 %base, <4 x i1> %mask, <4 x i8> %passthru)
   ret <4 x i8> %res
 }
@@ -107,23 +82,16 @@ define <4 x i8> @test_expandload_v4i8_all_ones(ptr %base, <4 x i8> %passthru) {
 }
 
 define <8 x i8> @test_expandload_v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v8i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v8i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr align 1 %base, <8 x i1> %mask, <8 x i8> %passthru)
   ret <8 x i8> %res
 }
@@ -139,23 +107,16 @@ define <8 x i8> @test_expandload_v8i8_all_ones(ptr %base, <8 x i8> %passthru) {
 }
 
 define <16 x i8> @test_expandload_v16i8(ptr %base, <16 x i1> %mask, <16 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v16i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v16i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, m1, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i8> @llvm.masked.expandload.v16i8(ptr align 1 %base, <16 x i1> %mask, <16 x i8> %passthru)
   ret <16 x i8> %res
 }
@@ -171,25 +132,17 @@ define <16 x i8> @test_expandload_v16i8_all_ones(ptr %base, <16 x i8> %passthru)
 }
 
 define <32 x i8> @test_expandload_v32i8(ptr %base, <32 x i1> %mask, <32 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v32i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 32
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v32i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 32
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e8, m2, ta, ma
+; CHECK-NEXT:    vle8.v v10, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i8> @llvm.masked.expandload.v32i8(ptr align 1 %base, <32 x i1> %mask, <32 x i8> %passthru)
   ret <32 x i8> %res
 }
@@ -206,25 +159,17 @@ define <32 x i8> @test_expandload_v32i8_all_ones(ptr %base, <32 x i8> %passthru)
 }
 
 define <64 x i8> @test_expandload_v64i8(ptr %base, <64 x i1> %mask, <64 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v64i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 64
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v64i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 64
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
+; CHECK-NEXT:    vle8.v v12, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <64 x i8> @llvm.masked.expandload.v64i8(ptr align 1 %base, <64 x i1> %mask, <64 x i8> %passthru)
   ret <64 x i8> %res
 }
@@ -241,25 +186,17 @@ define <64 x i8> @test_expandload_v64i8_all_ones(ptr %base, <64 x i8> %passthru)
 }
 
 define <128 x i8> @test_expandload_v128i8(ptr %base, <128 x i1> %mask, <128 x i8> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v128i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 128
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v16, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v24, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v128i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 128
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v128i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 128
+; CHECK-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-NEXT:    vle8.v v16, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e8, m8, ta, mu
+; CHECK-NEXT:    viota.m v24, v0
+; CHECK-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-NEXT:    ret
   %res = call <128 x i8> @llvm.masked.expandload.v128i8(ptr align 1 %base, <128 x i1> %mask, <128 x i8> %passthru)
   ret <128 x i8> %res
 }
@@ -276,408 +213,260 @@ define <128 x i8> @test_expandload_v128i8_all_ones(ptr %base, <128 x i8> %passth
 }
 
 define <256 x i8> @test_expandload_v256i8(ptr %base, <256 x i1> %mask, <256 x i8> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v256i8:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a2
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
-; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a2, a2, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v7, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a1)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v9, v0, 1
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v10, v9, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v10
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v10, v0, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v10
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v9
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a5, v0
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a6, v0
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a6, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a6, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a6, a6, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a6, sp, a6
-; CHECK-VRGATHER-RV32-NEXT:    addi a6, a6, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a6) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
-; CHECK-VRGATHER-RV32-NEXT:    cpop a5, a5
-; CHECK-VRGATHER-RV32-NEXT:    add a1, a5, a1
-; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
-; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a1, a1, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v7
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v24, v16, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v256i8:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 5
+; CHECK-RV32-NEXT:    sub sp, sp, a2
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    li a3, 24
+; CHECK-RV32-NEXT:    mul a2, a2, a3
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v7, v8
+; CHECK-RV32-NEXT:    li a2, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v8, (a1)
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v9, v0, 1
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsrl.vx v10, v9, a1
+; CHECK-RV32-NEXT:    vmv.x.s a3, v10
+; CHECK-RV32-NEXT:    vsrl.vx v10, v0, a1
+; CHECK-RV32-NEXT:    vmv.x.s a1, v10
+; CHECK-RV32-NEXT:    vmv.x.s a4, v9
+; CHECK-RV32-NEXT:    vmv.x.s a5, v0
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a6, v0
+; CHECK-RV32-NEXT:    vsetvli zero, a6, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-RV32-NEXT:    csrr a6, vlenb
+; CHECK-RV32-NEXT:    slli a6, a6, 4
+; CHECK-RV32-NEXT:    add a6, sp, a6
+; CHECK-RV32-NEXT:    addi a6, a6, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a6) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    cpop a5, a5
+; CHECK-RV32-NEXT:    add a1, a5, a1
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a3, a4, a3
+; CHECK-RV32-NEXT:    add a1, a1, a3
+; CHECK-RV32-NEXT:    add a0, a0, a1
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a1, v7
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    viota.m v16, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v256i8:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a2
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
-; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
-; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v7, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a1)
-; CHECK-VRGATHER-RV64-NEXT:    addi a1, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v9, v0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v9
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a4, v0
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle8.v v24, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a4, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a4, a4, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a4, sp, a4
-; CHECK-VRGATHER-RV64-NEXT:    addi a4, a4, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v24, (a4) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a4, v7
-; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV64-NEXT:    cpop a1, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    vmv.v.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v256i8:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a2
-; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v9, v0
-; CHECK-INDEXED-RV32-NEXT:    li a2, 128
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vle8.v v16, (a1)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v10, v0, 1
-; CHECK-INDEXED-RV32-NEXT:    li a1, 32
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v11, v10, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v11
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v11, v0, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v11
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v10
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a5, v0
-; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
-; CHECK-INDEXED-RV32-NEXT:    cpop a5, a5
-; CHECK-INDEXED-RV32-NEXT:    add a1, a5, a1
-; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
-; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
-; CHECK-INDEXED-RV32-NEXT:    add a3, a4, a3
-; CHECK-INDEXED-RV32-NEXT:    add a1, a1, a3
-; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v8
-; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v8
-; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v9
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v9
-; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v256i8:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a2
-; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v9, v0
-; CHECK-INDEXED-RV64-NEXT:    li a2, 128
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle8.v v16, (a1)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v10, v0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v10
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a3, v0
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v8
-; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v24, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    cpop a2, a3
-; CHECK-INDEXED-RV64-NEXT:    cpop a1, a1
-; CHECK-INDEXED-RV64-NEXT:    add a2, a0, a2
-; CHECK-INDEXED-RV64-NEXT:    add a1, a2, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v8
-; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    vluxei8.v v16, (a1), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v9
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v9
-; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    vluxei8.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v256i8:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    slli a2, a2, 5
+; CHECK-RV64-NEXT:    sub sp, sp, a2
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x20, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 32 * vlenb
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    li a3, 24
+; CHECK-RV64-NEXT:    mul a2, a2, a3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v7, v8
+; CHECK-RV64-NEXT:    li a2, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v8, (a1)
+; CHECK-RV64-NEXT:    addi a1, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v9, v0, 1
+; CHECK-RV64-NEXT:    vmv.x.s a1, v9
+; CHECK-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a4, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v24, (a0)
+; CHECK-RV64-NEXT:    csrr a4, vlenb
+; CHECK-RV64-NEXT:    slli a4, a4, 4
+; CHECK-RV64-NEXT:    add a4, sp, a4
+; CHECK-RV64-NEXT:    addi a4, a4, 16
+; CHECK-RV64-NEXT:    vs8r.v v24, (a4) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a4, v7
+; CHECK-RV64-NEXT:    cpop a3, a3
+; CHECK-RV64-NEXT:    cpop a1, a1
+; CHECK-RV64-NEXT:    add a0, a0, a3
+; CHECK-RV64-NEXT:    add a0, a0, a1
+; CHECK-RV64-NEXT:    vsetvli zero, a4, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, ta, mu
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    viota.m v16, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v8, v24, v16, v0.t
+; CHECK-RV64-NEXT:    vmv.v.v v16, v8
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 5
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> %mask, <256 x i8> %passthru)
   ret <256 x i8> %res
 }
 
 define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 128
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmset.m v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v9
-; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
-; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v8, v8, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v9
-; CHECK-VRGATHER-RV32-NEXT:    cpop a2, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
-; CHECK-VRGATHER-RV32-NEXT:    add a2, a4, a2
-; CHECK-VRGATHER-RV32-NEXT:    add a3, a0, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a2, a3, a2
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle8.v v16, (a2)
-; CHECK-VRGATHER-RV32-NEXT:    vle8.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle8.v v8, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmset.m v16
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV64-NEXT:    cpop a2, a2
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v16, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a2
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle8.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vmset.m v8
+; CHECK-RV32-NEXT:    li a2, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-RV32-NEXT:    vmv.x.s a3, v9
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a3, a4, a3
+; CHECK-RV32-NEXT:    vslidedown.vi v8, v8, 1
+; CHECK-RV32-NEXT:    vsrl.vx v9, v8, a2
+; CHECK-RV32-NEXT:    vmv.x.s a2, v9
+; CHECK-RV32-NEXT:    cpop a2, a2
+; CHECK-RV32-NEXT:    vmv.x.s a4, v8
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a2, a4, a2
+; CHECK-RV32-NEXT:    add a3, a0, a3
+; CHECK-RV32-NEXT:    add a2, a3, a2
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vle8.v v16, (a2)
+; CHECK-RV32-NEXT:    vle8.v v8, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    li a1, 128
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmset.m v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v9
-; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
-; CHECK-INDEXED-RV32-NEXT:    add a3, a4, a3
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v8, v8, 1
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v9, v8, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v9
-; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a4, v8
-; CHECK-INDEXED-RV32-NEXT:    cpop a4, a4
-; CHECK-INDEXED-RV32-NEXT:    add a2, a4, a2
-; CHECK-INDEXED-RV32-NEXT:    add a3, a0, a3
-; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vle8.v v16, (a2)
-; CHECK-INDEXED-RV32-NEXT:    vle8.v v8, (a0)
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v256i8_all_ones:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    li a1, 128
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle8.v v8, (a0)
-; CHECK-INDEXED-RV64-NEXT:    vmset.m v16
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v16, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV64-NEXT:    cpop a3, a3
-; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a2
-; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a3
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle8.v v16, (a0)
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v256i8_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v8, (a0)
+; CHECK-RV64-NEXT:    vmset.m v16
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    cpop a2, a2
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v16, 1
+; CHECK-RV64-NEXT:    vmv.x.s a3, v16
+; CHECK-RV64-NEXT:    cpop a3, a3
+; CHECK-RV64-NEXT:    add a0, a0, a2
+; CHECK-RV64-NEXT:    add a0, a0, a3
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vle8.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <256 x i8> @llvm.masked.expandload.v256i8(ptr align 1 %base, <256 x i1> splat (i1 true), <256 x i8> %passthru)
   ret <256 x i8> %res
 }
@@ -685,25 +474,16 @@ define <256 x i8> @test_expandload_v256i8_all_ones(ptr %base, <256 x i8> %passth
 ; Load + expand for i16 type
 
 define <1 x i16> @test_expandload_v1i16(ptr %base, <1 x i1> %mask, <1 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v1i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v1i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %passthru)
   ret <1 x i16> %res
 }
@@ -719,25 +499,16 @@ define <1 x i16> @test_expandload_v1i16_all_ones(ptr %base, <1 x i16> %passthru)
 }
 
 define <2 x i16> @test_expandload_v2i16(ptr %base, <2 x i1> %mask, <2 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v2i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v2i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %passthru)
   ret <2 x i16> %res
 }
@@ -753,25 +524,16 @@ define <2 x i16> @test_expandload_v2i16_all_ones(ptr %base, <2 x i16> %passthru)
 }
 
 define <4 x i16> @test_expandload_v4i16(ptr %base, <4 x i1> %mask, <4 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v4i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v4i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %passthru)
   ret <4 x i16> %res
 }
@@ -787,25 +549,16 @@ define <4 x i16> @test_expandload_v4i16_all_ones(ptr %base, <4 x i16> %passthru)
 }
 
 define <8 x i16> @test_expandload_v8i16(ptr %base, <8 x i1> %mask, <8 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v8i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v8i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %passthru)
   ret <8 x i16> %res
 }
@@ -821,25 +574,16 @@ define <8 x i16> @test_expandload_v8i16_all_ones(ptr %base, <8 x i16> %passthru)
 }
 
 define <16 x i16> @test_expandload_v16i16(ptr %base, <16 x i1> %mask, <16 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v16i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e16, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v16i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-NEXT:    vle16.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i16> @llvm.masked.expandload.v16i16(ptr align 2 %base, <16 x i1> %mask, <16 x i16> %passthru)
   ret <16 x i16> %res
 }
@@ -855,27 +599,17 @@ define <16 x i16> @test_expandload_v16i16_all_ones(ptr %base, <16 x i16> %passth
 }
 
 define <32 x i16> @test_expandload_v32i16(ptr %base, <32 x i1> %mask, <32 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v32i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 32
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e16, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v32i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 32
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v12, v12, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e16, m4, ta, ma
+; CHECK-NEXT:    vle16.v v12, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e16, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i16> @llvm.masked.expandload.v32i16(ptr align 2 %base, <32 x i1> %mask, <32 x i16> %passthru)
   ret <32 x i16> %res
 }
@@ -892,27 +626,17 @@ define <32 x i16> @test_expandload_v32i16_all_ones(ptr %base, <32 x i16> %passth
 }
 
 define <64 x i16> @test_expandload_v64i16(ptr %base, <64 x i1> %mask, <64 x i16> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v64i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 64
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v16, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v24, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v64i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 64
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v16, v16, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v64i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 64
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-NEXT:    vle16.v v16, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-NEXT:    viota.m v24, v0
+; CHECK-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-NEXT:    ret
   %res = call <64 x i16> @llvm.masked.expandload.v64i16(ptr align 2 %base, <64 x i1> %mask, <64 x i16> %passthru)
   ret <64 x i16> %res
 }
@@ -929,390 +653,250 @@ define <64 x i16> @test_expandload_v64i16_all_ones(ptr %base, <64 x i16> %passth
 }
 
 define <128 x i16> @test_expandload_v128i16(ptr %base, <128 x i1> %mask, <128 x i16> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v128i16:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 64
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 8
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v7
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v25, v0, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v25
-; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a4, v0
-; CHECK-VRGATHER-RV32-NEXT:    cpop a4, a4
-; CHECK-VRGATHER-RV32-NEXT:    add a3, a4, a3
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v8, v0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    vmv.v.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v128i16:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    li a2, 40
+; CHECK-RV32-NEXT:    mul a1, a1, a2
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    li a2, 24
+; CHECK-RV32-NEXT:    mul a1, a1, a2
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 5
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    li a1, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a2, v0
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    slli a2, a2, 4
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v7, v0, 8
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a2, v7
+; CHECK-RV32-NEXT:    li a3, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v25, v0, a3
+; CHECK-RV32-NEXT:    vmv.x.s a3, v25
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    vmv.x.s a4, v0
+; CHECK-RV32-NEXT:    cpop a4, a4
+; CHECK-RV32-NEXT:    add a3, a4, a3
+; CHECK-RV32-NEXT:    slli a3, a3, 1
+; CHECK-RV32-NEXT:    add a0, a0, a3
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-RV32-NEXT:    viota.m v8, v0
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    viota.m v16, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV32-NEXT:    vmv.v.v v16, v8
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 40
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v128i16:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
-; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 8
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v7
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
-; CHECK-VRGATHER-RV64-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV64-NEXT:    slli a3, a3, 1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v128i16:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a1
-; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v0
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 8
-; CHECK-INDEXED-RV32-NEXT:    li a1, 64
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v8, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    li a2, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v8, v24, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v8
-; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v24
-; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
-; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    add a2, a0, a2
-; CHECK-INDEXED-RV32-NEXT:    addi a3, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a3) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei16.v v16, (a2), v8, v0.t
-; CHECK-INDEXED-RV32-NEXT:    viota.m v8, v24
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v24
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v8, v8, 1, v0.t
-; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v8, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v8, (a1) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a1) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v128i16:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 64
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 1, v0.t
-; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v7
-; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    add a2, a0, a2
-; CHECK-INDEXED-RV64-NEXT:    csrr a3, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a3, a3, 3
-; CHECK-INDEXED-RV64-NEXT:    add a3, sp, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a3, a3, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a3) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    addi a3, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a3) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei16.v v16, (a2), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 1, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei16.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v128i16:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    li a2, 40
+; CHECK-RV64-NEXT:    mul a1, a1, a2
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 5
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    li a1, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a2, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    li a3, 24
+; CHECK-RV64-NEXT:    mul a2, a2, a3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v7, v0, 8
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a2, v7
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-RV64-NEXT:    cpop a3, a3
+; CHECK-RV64-NEXT:    slli a3, a3, 1
+; CHECK-RV64-NEXT:    add a0, a0, a3
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, mu
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    viota.m v16, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 5
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 40
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> %mask, <128 x i16> %passthru)
   ret <128 x i16> %res
 }
 
 define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 64
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle16.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmset.m v16
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v17, v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v17
-; CHECK-VRGATHER-RV32-NEXT:    cpop a2, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a2, a3, a2
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a2
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle16.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle16.v v8, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmset.m v16
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV64-NEXT:    cpop a2, a2
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a2
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle16.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v8, (a0)
+; CHECK-RV32-NEXT:    vmset.m v16
+; CHECK-RV32-NEXT:    li a2, 32
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v17, v16, a2
+; CHECK-RV32-NEXT:    vmv.x.s a2, v17
+; CHECK-RV32-NEXT:    cpop a2, a2
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    add a2, a3, a2
+; CHECK-RV32-NEXT:    slli a2, a2, 1
+; CHECK-RV32-NEXT:    add a0, a0, a2
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV32-NEXT:    vle16.v v16, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    li a1, 64
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vle16.v v8, (a0)
-; CHECK-INDEXED-RV32-NEXT:    vmset.m v16
-; CHECK-INDEXED-RV32-NEXT:    li a2, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v17, v16, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v17
-; CHECK-INDEXED-RV32-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    cpop a3, a3
-; CHECK-INDEXED-RV32-NEXT:    add a2, a3, a2
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    add a0, a0, a2
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vle16.v v16, (a0)
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v128i16_all_ones:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    li a1, 64
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle16.v v8, (a0)
-; CHECK-INDEXED-RV64-NEXT:    vmset.m v16
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV64-NEXT:    cpop a2, a2
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    add a0, a0, a2
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle16.v v16, (a0)
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v128i16_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v8, (a0)
+; CHECK-RV64-NEXT:    vmset.m v16
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    cpop a2, a2
+; CHECK-RV64-NEXT:    slli a2, a2, 1
+; CHECK-RV64-NEXT:    add a0, a0, a2
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-RV64-NEXT:    vle16.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <128 x i16> @llvm.masked.expandload.v128i16(ptr align 2 %base, <128 x i1> splat (i1 true), <128 x i16> %passthru)
   ret <128 x i16> %res
 }
@@ -1320,25 +904,16 @@ define <128 x i16> @test_expandload_v128i16_all_ones(ptr %base, <128 x i16> %pas
 ; Load + expand for i32 type
 
 define <1 x i32> @test_expandload_v1i32(ptr %base, <1 x i1> %mask, <1 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v1i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v1i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %passthru)
   ret <1 x i32> %res
 }
@@ -1354,25 +929,16 @@ define <1 x i32> @test_expandload_v1i32_all_ones(ptr %base, <1 x i32> %passthru)
 }
 
 define <2 x i32> @test_expandload_v2i32(ptr %base, <2 x i1> %mask, <2 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v2i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v2i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %passthru)
   ret <2 x i32> %res
 }
@@ -1388,25 +954,16 @@ define <2 x i32> @test_expandload_v2i32_all_ones(ptr %base, <2 x i32> %passthru)
 }
 
 define <4 x i32> @test_expandload_v4i32(ptr %base, <4 x i1> %mask, <4 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v4i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v4i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %passthru)
   ret <4 x i32> %res
 }
@@ -1422,25 +979,16 @@ define <4 x i32> @test_expandload_v4i32_all_ones(ptr %base, <4 x i32> %passthru)
 }
 
 define <8 x i32> @test_expandload_v8i32(ptr %base, <8 x i1> %mask, <8 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v8i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v8i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-NEXT:    vle32.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %passthru)
   ret <8 x i32> %res
 }
@@ -1456,25 +1004,16 @@ define <8 x i32> @test_expandload_v8i32_all_ones(ptr %base, <8 x i32> %passthru)
 }
 
 define <16 x i32> @test_expandload_v16i32(ptr %base, <16 x i1> %mask, <16 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v16i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e32, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v16i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v12, v12, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m4, ta, ma
+; CHECK-NEXT:    vle32.v v12, (a0)
+; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i32> @llvm.masked.expandload.v16i32(ptr align 4 %base, <16 x i1> %mask, <16 x i32> %passthru)
   ret <16 x i32> %res
 }
@@ -1490,27 +1029,17 @@ define <16 x i32> @test_expandload_v16i32_all_ones(ptr %base, <16 x i32> %passth
 }
 
 define <32 x i32> @test_expandload_v32i32(ptr %base, <32 x i1> %mask, <32 x i32> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v32i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 32
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v24, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v32i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 32
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v32i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 32
+; CHECK-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-NEXT:    vle32.v v16, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-NEXT:    viota.m v24, v0
+; CHECK-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-NEXT:    ret
   %res = call <32 x i32> @llvm.masked.expandload.v32i32(ptr align 4 %base, <32 x i1> %mask, <32 x i32> %passthru)
   ret <32 x i32> %res
 }
@@ -1527,337 +1056,221 @@ define <32 x i32> @test_expandload_v32i32_all_ones(ptr %base, <32 x i32> %passth
 }
 
 define <64 x i32> @test_expandload_v64i32(ptr %base, <64 x i1> %mask, <64 x i32> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v64i32:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a2, a2, a3
-; CHECK-VRGATHER-RV32-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV32-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 4
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a2, v7
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v0
-; CHECK-VRGATHER-RV32-NEXT:    cpop a3, a3
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v64i32:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a2, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a2, a2, a3
-; CHECK-VRGATHER-RV64-NEXT:    add a2, sp, a2
-; CHECK-VRGATHER-RV64-NEXT:    addi a2, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 4
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a2, v7
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a3, v0
-; CHECK-VRGATHER-RV64-NEXT:    cpopw a3, a3
-; CHECK-VRGATHER-RV64-NEXT:    slli a3, a3, 2
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a3
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v64i32:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    li a2, 40
+; CHECK-RV32-NEXT:    mul a1, a1, a2
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 5
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a2, v0
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-RV32-NEXT:    csrr a2, vlenb
+; CHECK-RV32-NEXT:    li a3, 24
+; CHECK-RV32-NEXT:    mul a2, a2, a3
+; CHECK-RV32-NEXT:    add a2, sp, a2
+; CHECK-RV32-NEXT:    addi a2, a2, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v7, v0, 4
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a2, v7
+; CHECK-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-RV32-NEXT:    cpop a3, a3
+; CHECK-RV32-NEXT:    slli a3, a3, 2
+; CHECK-RV32-NEXT:    add a0, a0, a3
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    viota.m v16, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 40
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v64i32:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    sub sp, sp, a1
-; CHECK-INDEXED-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV32-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV32-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v7, v0
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 4
-; CHECK-INDEXED-RV32-NEXT:    li a1, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-INDEXED-RV32-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v7
-; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
-; CHECK-INDEXED-RV32-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV32-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; CHECK-INDEXED-RV32-NEXT:    viota.m v24, v7
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v24, v24, 2, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV32-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV32-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV32-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV32-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v64i32:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 4
-; CHECK-INDEXED-RV64-NEXT:    li a1, 32
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 2, v0.t
-; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v7
-; CHECK-INDEXED-RV64-NEXT:    cpopw a1, a1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    add a1, a0, a1
-; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei32.v v16, (a1), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 2, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e32, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei32.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v64i32:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    li a2, 40
+; CHECK-RV64-NEXT:    mul a1, a1, a2
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 5
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m2, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a2, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a2, vlenb
+; CHECK-RV64-NEXT:    li a3, 24
+; CHECK-RV64-NEXT:    mul a2, a2, a3
+; CHECK-RV64-NEXT:    add a2, sp, a2
+; CHECK-RV64-NEXT:    addi a2, a2, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a2) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, mf2, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v7, v0, 4
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a2, v7
+; CHECK-RV64-NEXT:    vmv.x.s a3, v0
+; CHECK-RV64-NEXT:    cpopw a3, a3
+; CHECK-RV64-NEXT:    slli a3, a3, 2
+; CHECK-RV64-NEXT:    add a0, a0, a3
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, mu
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    viota.m v16, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 5
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 40
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> %mask, <64 x i32> %passthru)
   ret <64 x i32> %res
 }
 
 define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle32.v v8, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmset.m v16
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 2
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle32.v v8, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 128
-; CHECK-VRGATHER-RV64-NEXT:    vle32.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV32-NEXT:    vle32.v v8, (a0)
+; CHECK-RV32-NEXT:    vmset.m v16
+; CHECK-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    slli a1, a1, 2
+; CHECK-RV32-NEXT:    add a0, a0, a1
+; CHECK-RV32-NEXT:    vle32.v v16, (a0)
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    li a1, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vle32.v v8, (a0)
-; CHECK-INDEXED-RV32-NEXT:    vmset.m v16
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    add a0, a0, a1
-; CHECK-INDEXED-RV32-NEXT:    vle32.v v16, (a0)
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v64i32_all_ones:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    li a1, 32
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vle32.v v8, (a0)
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 128
-; CHECK-INDEXED-RV64-NEXT:    vle32.v v16, (a0)
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v64i32_all_ones:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e32, m8, ta, ma
+; CHECK-RV64-NEXT:    vle32.v v8, (a0)
+; CHECK-RV64-NEXT:    addi a0, a0, 128
+; CHECK-RV64-NEXT:    vle32.v v16, (a0)
+; CHECK-RV64-NEXT:    ret
   %res = call <64 x i32> @llvm.masked.expandload.v64i32(ptr align 4 %base, <64 x i1> splat (i1 true), <64 x i32> %passthru)
   ret <64 x i32> %res
 }
@@ -1865,34 +1278,16 @@ define <64 x i32> @test_expandload_v64i32_all_ones(ptr %base, <64 x i32> %passth
 ; Load + expand for i64 type
 
 define <1 x i64> @test_expandload_v1i64(ptr %base, <1 x i1> %mask, <1 x i64> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v1i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v1i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v1i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: test_expandload_v1i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %passthru)
   ret <1 x i64> %res
 }
@@ -1908,34 +1303,16 @@ define <1 x i64> @test_expandload_v1i64_all_ones(ptr %base, <1 x i64> %passthru)
 }
 
 define <2 x i64> @test_expandload_v2i64(ptr %base, <2 x i1> %mask, <2 x i64> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v2i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v2i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v2i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: test_expandload_v2i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %passthru)
   ret <2 x i64> %res
 }
@@ -1951,34 +1328,16 @@ define <2 x i64> @test_expandload_v2i64_all_ones(ptr %base, <2 x i64> %passthru)
 }
 
 define <4 x i64> @test_expandload_v4i64(ptr %base, <4 x i1> %mask, <4 x i64> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v4i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v4i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v4i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: test_expandload_v4i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-NEXT:    vle64.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %passthru)
   ret <4 x i64> %res
 }
@@ -1994,34 +1353,16 @@ define <4 x i64> @test_expandload_v4i64_all_ones(ptr %base, <4 x i64> %passthru)
 }
 
 define <8 x i64> @test_expandload_v8i64(ptr %base, <8 x i1> %mask, <8 x i64> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v8i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v8i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v8i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: test_expandload_v8i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-NEXT:    vle64.v v12, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %passthru)
   ret <8 x i64> %res
 }
@@ -2037,34 +1378,16 @@ define <8 x i64> @test_expandload_v8i64_all_ones(ptr %base, <8 x i64> %passthru)
 }
 
 define <16 x i64> @test_expandload_v16i64(ptr %base, <16 x i1> %mask, <16 x i64> %passthru) {
-; CHECK-VRGATHER-LABEL: test_expandload_v16i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v16, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v24, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v16i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v16i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: test_expandload_v16i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-NEXT:    vle64.v v16, (a0)
+; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-NEXT:    viota.m v24, v0
+; CHECK-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-NEXT:    ret
   %res = call <16 x i64> @llvm.masked.expandload.v16i64(ptr align 8 %base, <16 x i1> %mask, <16 x i64> %passthru)
   ret <16 x i64> %res
 }
@@ -2080,267 +1403,195 @@ define <16 x i64> @test_expandload_v16i64_all_ones(ptr %base, <16 x i64> %passth
 }
 
 define <32 x i64> @test_expandload_v32i64(ptr %base, <32 x i1> %mask, <32 x i64> %passthru) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v32i64:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV32-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle64.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV32-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v0
-; CHECK-VRGATHER-RV32-NEXT:    zext.h a1, a1
-; CHECK-VRGATHER-RV32-NEXT:    cpop a1, a1
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a1, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v7, v0, 2
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vcpop.m a1, v7
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vle64.v v16, (a0)
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV32-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV32-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV32-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV32-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV32-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v32i64:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, -16
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV64-NEXT:    sub sp, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle64.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a1, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a1, a1, a2
-; CHECK-VRGATHER-RV64-NEXT:    add a1, sp, a1
-; CHECK-VRGATHER-RV64-NEXT:    addi a1, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v0
-; CHECK-VRGATHER-RV64-NEXT:    zext.h a1, a1
-; CHECK-VRGATHER-RV64-NEXT:    cpopw a1, a1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v7, v0, 2
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vcpop.m a1, v7
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vle64.v v16, (a0)
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    viota.m v16, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 5
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 4
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    slli a0, a0, 3
-; CHECK-VRGATHER-RV64-NEXT:    add a0, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 16
-; CHECK-VRGATHER-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
-; CHECK-VRGATHER-RV64-NEXT:    csrr a0, vlenb
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    mul a0, a0, a1
-; CHECK-VRGATHER-RV64-NEXT:    add sp, sp, a0
-; CHECK-VRGATHER-RV64-NEXT:    addi sp, sp, 16
-; CHECK-VRGATHER-RV64-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v32i64:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    addi sp, sp, -16
+; CHECK-RV32-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    li a2, 40
+; CHECK-RV32-NEXT:    mul a1, a1, a2
+; CHECK-RV32-NEXT:    sub sp, sp, a1
+; CHECK-RV32-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    slli a1, a1, 5
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a1, v0
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-RV32-NEXT:    vle64.v v16, (a0)
+; CHECK-RV32-NEXT:    csrr a1, vlenb
+; CHECK-RV32-NEXT:    li a2, 24
+; CHECK-RV32-NEXT:    mul a1, a1, a2
+; CHECK-RV32-NEXT:    add a1, sp, a1
+; CHECK-RV32-NEXT:    addi a1, a1, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv.x.s a1, v0
+; CHECK-RV32-NEXT:    zext.h a1, a1
+; CHECK-RV32-NEXT:    cpop a1, a1
+; CHECK-RV32-NEXT:    slli a1, a1, 3
+; CHECK-RV32-NEXT:    add a0, a0, a1
+; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v7, v0, 2
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vcpop.m a1, v7
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-RV32-NEXT:    vle64.v v16, (a0)
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-RV32-NEXT:    viota.m v16, v0
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    addi a0, sp, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    viota.m v16, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV32-NEXT:    vmv1r.v v0, v7
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 5
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 4
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 24
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    slli a0, a0, 3
+; CHECK-RV32-NEXT:    add a0, sp, a0
+; CHECK-RV32-NEXT:    addi a0, a0, 16
+; CHECK-RV32-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV32-NEXT:    csrr a0, vlenb
+; CHECK-RV32-NEXT:    li a1, 40
+; CHECK-RV32-NEXT:    mul a0, a0, a1
+; CHECK-RV32-NEXT:    add sp, sp, a0
+; CHECK-RV32-NEXT:    addi sp, sp, 16
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v32i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v0
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v0, v0, 2
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v28, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v24
-; CHECK-INDEXED-RV32-NEXT:    zext.h a1, a1
-; CHECK-INDEXED-RV32-NEXT:    cpop a1, a1
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    add a1, a0, a1
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v16, (a1), v28, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e32, m4, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v28, v24
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v0, v24
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v28, v28, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v28, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v32i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, -16
-; CHECK-INDEXED-RV64-NEXT:    .cfi_def_cfa_offset 16
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    sub sp, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x10, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 16 * vlenb
-; CHECK-INDEXED-RV64-NEXT:    csrr a1, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    add a1, sp, a1
-; CHECK-INDEXED-RV64-NEXT:    addi a1, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v7, v0
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v0, v0, 2
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v16, v16, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    addi a1, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v7
-; CHECK-INDEXED-RV64-NEXT:    zext.h a1, a1
-; CHECK-INDEXED-RV64-NEXT:    cpopw a1, a1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    add a1, a0, a1
-; CHECK-INDEXED-RV64-NEXT:    csrr a2, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    add a2, sp, a2
-; CHECK-INDEXED-RV64-NEXT:    addi a2, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v16, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    addi a2, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    vl8r.v v24, (a2) # Unknown-size Folded Reload
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v16, (a1), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    viota.m v24, v7
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v0, v7
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v24, v24, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m8, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v24, v0.t
-; CHECK-INDEXED-RV64-NEXT:    csrr a0, vlenb
-; CHECK-INDEXED-RV64-NEXT:    slli a0, a0, 4
-; CHECK-INDEXED-RV64-NEXT:    add sp, sp, a0
-; CHECK-INDEXED-RV64-NEXT:    addi sp, sp, 16
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v32i64:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    addi sp, sp, -16
+; CHECK-RV64-NEXT:    .cfi_def_cfa_offset 16
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    li a2, 40
+; CHECK-RV64-NEXT:    mul a1, a1, a2
+; CHECK-RV64-NEXT:    sub sp, sp, a1
+; CHECK-RV64-NEXT:    .cfi_escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x28, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22 # sp + 16 + 40 * vlenb
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    slli a1, a1, 5
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a1, v0
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e16, m2, ta, ma
+; CHECK-RV64-NEXT:    vle64.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a1, vlenb
+; CHECK-RV64-NEXT:    li a2, 24
+; CHECK-RV64-NEXT:    mul a1, a1, a2
+; CHECK-RV64-NEXT:    add a1, sp, a1
+; CHECK-RV64-NEXT:    addi a1, a1, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a1) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv.x.s a1, v0
+; CHECK-RV64-NEXT:    zext.h a1, a1
+; CHECK-RV64-NEXT:    cpopw a1, a1
+; CHECK-RV64-NEXT:    slli a1, a1, 3
+; CHECK-RV64-NEXT:    add a0, a0, a1
+; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, mf4, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v7, v0, 2
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vcpop.m a1, v7
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e64, m8, ta, ma
+; CHECK-RV64-NEXT:    vle64.v v16, (a0)
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e64, m8, ta, mu
+; CHECK-RV64-NEXT:    viota.m v16, v0
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    addi a0, sp, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v8, v16, v24, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v8, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    viota.m v16, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
+; CHECK-RV64-NEXT:    vmv1r.v v0, v7
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 5
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v16, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 4
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v24, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 24
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    vrgather.vv v16, v24, v8, v0.t
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    slli a0, a0, 3
+; CHECK-RV64-NEXT:    add a0, sp, a0
+; CHECK-RV64-NEXT:    addi a0, a0, 16
+; CHECK-RV64-NEXT:    vl8r.v v8, (a0) # Unknown-size Folded Reload
+; CHECK-RV64-NEXT:    csrr a0, vlenb
+; CHECK-RV64-NEXT:    li a1, 40
+; CHECK-RV64-NEXT:    mul a0, a0, a1
+; CHECK-RV64-NEXT:    add sp, sp, a0
+; CHECK-RV64-NEXT:    addi sp, sp, 16
+; CHECK-RV64-NEXT:    ret
   %res = call <32 x i64> @llvm.masked.expandload.v32i64(ptr align 8 %base, <32 x i1> %mask, <32 x i64> %passthru)
   ret <32 x i64> %res
 }
@@ -2360,37106 +1611,18560 @@ define <32 x i64> @test_expandload_v32i64_all_ones(ptr %base, <32 x i64> %passth
 ; Tests that will exceed the range of i8 index.
 
 define <512 x i8> @test_expandload_v512i8(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(16, 1024) {
-; CHECK-VRGATHER-LABEL: test_expandload_v512i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    li a1, 512
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a2, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    vrgatherei16.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: test_expandload_v512i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    li a1, 512
-; CHECK-INDEXED-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v16, v0
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v16, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: test_expandload_v512i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    li a1, 512
+; CHECK-NEXT:    vsetvli zero, a1, e8, m4, ta, ma
+; CHECK-NEXT:    vcpop.m a2, v0
+; CHECK-NEXT:    vsetvli zero, a2, e8, m4, ta, ma
+; CHECK-NEXT:    vle8.v v12, (a0)
+; CHECK-NEXT:    vsetvli zero, a1, e16, m8, ta, ma
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vsetvli zero, zero, e8, m4, ta, mu
+; CHECK-NEXT:    vrgatherei16.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
 
 ; FIXME: We can split it in lowering.
 define <512 x i8> @test_expandload_v512i8_vlen512(ptr %base, <512 x i1> %mask, <512 x i8> %passthru) vscale_range(8, 1024) {
-; CHECK-VRGATHER-RV32-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-VRGATHER-RV32:       # %bb.0:
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v0
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_1
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_544
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1: # %else
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_2
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_545
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_2: # %else2
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_3
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_546
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_3: # %else6
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_4
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_547
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_4: # %else10
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_5
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_548
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_5: # %else14
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_6
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_549
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_6: # %else18
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_7
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_550
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_7: # %else22
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_8
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_551
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_8: # %else26
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_9
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_552
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_9: # %else30
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_10
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_553
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_10: # %else34
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a1, .LBB61_11
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_554
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_11: # %else38
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_12
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_555
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_12: # %else42
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_13
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_556
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_13: # %else46
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_14
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_557
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_14: # %else50
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_15
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_558
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_15: # %else54
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_16
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_559
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_16: # %else58
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_17
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_560
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_17: # %else62
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_18
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_561
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_18: # %else66
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_19
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_562
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_19: # %else70
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_20
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_563
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_20: # %else74
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_21
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_564
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_21: # %else78
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_22
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_565
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_22: # %else82
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_23
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_566
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_23: # %else86
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_24
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_567
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_24: # %else90
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_25
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_568
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_25: # %else94
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_26
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_569
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_26: # %else98
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_27
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_570
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_27: # %else102
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_28
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_571
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_28: # %else106
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_30
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_29: # %cond.load109
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_30: # %else110
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_32
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.31: # %cond.load113
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_32: # %else114
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v0, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_34
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.33: # %cond.load117
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v17, 30
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_34: # %else118
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_35
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_572
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_35: # %else122
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_36
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_573
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_36: # %else126
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_37
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_574
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_37: # %else130
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_38
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_575
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_38: # %else134
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_39
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_576
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_39: # %else138
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_40
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_577
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_40: # %else142
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_41
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_578
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_41: # %else146
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_42
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_579
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_42: # %else150
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_43
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_580
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_43: # %else154
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_44
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_581
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_44: # %else158
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_45
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_582
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_45: # %else162
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_46
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_583
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_46: # %else166
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_47
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_584
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_47: # %else170
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_48
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_585
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_48: # %else174
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_49
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_586
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_49: # %else178
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_50
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_587
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_50: # %else182
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_51
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_588
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_51: # %else186
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_52
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_589
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_52: # %else190
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_53
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_590
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_53: # %else194
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_54
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_591
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_54: # %else198
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_55
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_592
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_55: # %else202
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_56
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_593
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_56: # %else206
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_57
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_594
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_57: # %else210
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_58
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_595
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_58: # %else214
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_59
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_596
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_59: # %else218
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_60
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_597
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_60: # %else222
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_61
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_598
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_61: # %else226
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_62
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_599
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_62: # %else230
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_63
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_600
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_63: # %else234
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_64
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_601
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_64: # %else238
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_66
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_65: # %cond.load241
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 62
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 61
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_66: # %else242
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_68
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.67: # %cond.load245
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 63
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 62
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_68: # %else246
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_69
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_602
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_69: # %else250
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_70
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_603
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_70: # %else254
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_71
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_604
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_71: # %else258
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_72
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_605
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_72: # %else262
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_73
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_606
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_73: # %else266
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_74
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_607
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_74: # %else270
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_75
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_608
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_75: # %else274
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_76
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_609
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_76: # %else278
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_77
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_610
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_77: # %else282
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_78
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_611
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_78: # %else286
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_79
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_612
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_79: # %else290
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_80
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_613
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_80: # %else294
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_81
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_614
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_81: # %else298
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_82
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_615
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_82: # %else302
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_83
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_616
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_83: # %else306
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_84
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_617
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_84: # %else310
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_85
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_618
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_85: # %else314
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_86
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_619
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_86: # %else318
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_87
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_620
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_87: # %else322
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_88
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_621
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_88: # %else326
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_89
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_622
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_89: # %else330
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_90
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_623
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_90: # %else334
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_91
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_624
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_91: # %else338
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_92
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_625
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_92: # %else342
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_93
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_626
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_93: # %else346
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_94
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_627
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_94: # %else350
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_95
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_628
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_95: # %else354
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_96
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_629
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_96: # %else358
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_97
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_630
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_97: # %else362
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_98
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_631
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_98: # %else366
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_100
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_99: # %cond.load369
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 94
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 93
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_100: # %else370
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_102
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.101: # %cond.load373
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 95
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 94
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_102: # %else374
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_103
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_632
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_103: # %else378
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_104
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_633
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_104: # %else382
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_105
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_634
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_105: # %else386
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_106
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_635
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_106: # %else390
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_107
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_636
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_107: # %else394
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_108
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_637
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_108: # %else398
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_109
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_638
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_109: # %else402
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_110
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_639
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_110: # %else406
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_111
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_640
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_111: # %else410
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_112
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_641
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_112: # %else414
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_113
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_642
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_113: # %else418
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_114
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_643
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_114: # %else422
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_115
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_644
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_115: # %else426
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_116
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_645
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_116: # %else430
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_117
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_646
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_117: # %else434
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_118
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_647
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_118: # %else438
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_119
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_648
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_119: # %else442
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_120
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_649
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_120: # %else446
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_121
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_650
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_121: # %else450
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_122
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_651
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_122: # %else454
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_123
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_652
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_123: # %else458
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_124
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_653
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_124: # %else462
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_125
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_654
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_125: # %else466
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_126
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_655
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_126: # %else470
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_127
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_656
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_127: # %else474
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_128
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_657
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_128: # %else478
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_129
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_658
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_129: # %else482
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_130
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_659
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_130: # %else486
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_131
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_660
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_131: # %else490
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_132
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_661
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_132: # %else494
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_134
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_133: # %cond.load497
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 126
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 125
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_134: # %else498
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_136
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.135: # %cond.load501
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 127
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 126
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_136: # %else502
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_137
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_662
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_137: # %else506
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_138
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_663
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_138: # %else510
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_139
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_664
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_139: # %else514
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_140
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_665
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_140: # %else518
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_141
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_666
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_141: # %else522
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_142
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_667
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_142: # %else526
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_143
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_668
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_143: # %else530
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_144
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_669
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_144: # %else534
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_145
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_670
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_145: # %else538
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_146
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_671
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_146: # %else542
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_147
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_672
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_147: # %else546
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_148
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_673
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_148: # %else550
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_149
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_674
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_149: # %else554
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_150
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_675
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_150: # %else558
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_151
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_676
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_151: # %else562
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_152
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_677
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_152: # %else566
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_153
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_678
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_153: # %else570
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_154
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_679
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_154: # %else574
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_155
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_680
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_155: # %else578
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_156
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_681
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_156: # %else582
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_157
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_682
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_157: # %else586
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_158
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_683
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_158: # %else590
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_159
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_684
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_159: # %else594
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_160
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_685
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_160: # %else598
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_161
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_686
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_161: # %else602
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_162
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_687
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_162: # %else606
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_163
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_688
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_163: # %else610
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_164
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_689
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_164: # %else614
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_165
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_690
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_165: # %else618
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_166
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_691
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_166: # %else622
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_168
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_167: # %cond.load625
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 158
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 157
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_168: # %else626
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_170
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.169: # %cond.load629
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 159
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 158
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_170: # %else630
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_171
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_692
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_171: # %else634
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_172
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_693
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_172: # %else638
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_173
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_694
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_173: # %else642
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_174
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_695
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_174: # %else646
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_175
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_696
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_175: # %else650
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_176
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_697
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_176: # %else654
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_177
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_698
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_177: # %else658
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_178
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_699
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_178: # %else662
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_179
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_700
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_179: # %else666
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_180
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_701
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_180: # %else670
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_181
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_702
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_181: # %else674
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_182
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_703
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_182: # %else678
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_183
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_704
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_183: # %else682
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_184
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_705
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_184: # %else686
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_185
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_706
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_185: # %else690
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_186
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_707
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_186: # %else694
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_187
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_708
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_187: # %else698
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_188
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_709
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_188: # %else702
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_189
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_710
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_189: # %else706
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_190
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_711
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_190: # %else710
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_191
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_712
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_191: # %else714
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_192
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_713
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_192: # %else718
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_193
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_714
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_193: # %else722
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_194
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_715
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_194: # %else726
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_195
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_716
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_195: # %else730
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_196
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_717
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_196: # %else734
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_197
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_718
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_197: # %else738
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_198
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_719
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_198: # %else742
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_199
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_720
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_199: # %else746
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_200
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_721
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_200: # %else750
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_202
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_201: # %cond.load753
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 190
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 189
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_202: # %else754
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_204
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.203: # %cond.load757
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 191
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 190
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_204: # %else758
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_205
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_722
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_205: # %else762
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_206
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_723
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_206: # %else766
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_207
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_724
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_207: # %else770
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_208
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_725
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_208: # %else774
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_209
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_726
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_209: # %else778
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_210
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_727
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_210: # %else782
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_211
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_728
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_211: # %else786
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_212
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_729
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_212: # %else790
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_213
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_730
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_213: # %else794
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_214
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_731
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_214: # %else798
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_215
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_732
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_215: # %else802
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_216
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_733
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_216: # %else806
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_217
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_734
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_217: # %else810
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_218
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_735
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_218: # %else814
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_219
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_736
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_219: # %else818
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_220
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_737
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_220: # %else822
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_221
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_738
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_221: # %else826
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_222
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_739
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_222: # %else830
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_223
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_740
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_223: # %else834
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_224
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_741
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_224: # %else838
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_225
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_742
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_225: # %else842
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_226
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_743
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_226: # %else846
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_227
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_744
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_227: # %else850
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_228
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_745
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_228: # %else854
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_229
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_746
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_229: # %else858
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_230
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_747
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_230: # %else862
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_231
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_748
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_231: # %else866
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_232
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_749
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_232: # %else870
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_233
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_750
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_233: # %else874
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_234
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_751
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_234: # %else878
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_236
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_235: # %cond.load881
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 222
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 221
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_236: # %else882
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_238
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.237: # %cond.load885
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 223
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 222
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_238: # %else886
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_239
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_752
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_239: # %else890
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_240
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_753
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_240: # %else894
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_241
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_754
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_241: # %else898
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_242
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_755
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_242: # %else902
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_243
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_756
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_243: # %else906
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_244
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_757
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_244: # %else910
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_245
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_758
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_245: # %else914
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_246
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_759
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_246: # %else918
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_247
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_760
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_247: # %else922
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_248
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_761
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_248: # %else926
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_249
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_762
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_249: # %else930
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_250
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_763
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_250: # %else934
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_251
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_764
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_251: # %else938
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_252
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_765
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_252: # %else942
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_253
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_766
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_253: # %else946
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_254
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_767
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_254: # %else950
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_255
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_768
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_255: # %else954
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_256
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_769
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_256: # %else958
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_257
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_770
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_257: # %else962
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_258
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_771
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_258: # %else966
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_259
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_772
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_259: # %else970
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_260
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_773
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_260: # %else974
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_261
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_774
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_261: # %else978
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_262
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_775
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_262: # %else982
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_263
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_776
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_263: # %else986
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_264
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_777
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_264: # %else990
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_265
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_778
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_265: # %else994
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_266
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_779
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_266: # %else998
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_267
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_780
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_267: # %else1002
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_268
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_781
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_268: # %else1006
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_270
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_269: # %cond.load1009
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 254
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 253
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_270: # %else1010
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_272
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.271: # %cond.load1013
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 255
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 254
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_272: # %else1014
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_273
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_782
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_273: # %else1018
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_274
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_783
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_274: # %else1022
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_275
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_784
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_275: # %else1026
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_276
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_785
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_276: # %else1030
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_277
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_786
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_277: # %else1034
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_278
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_787
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_278: # %else1038
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_279
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_788
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_279: # %else1042
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_280
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_789
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_280: # %else1046
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_281
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_790
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_281: # %else1050
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_282
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_791
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_282: # %else1054
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_283
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_792
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_283: # %else1058
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_284
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_793
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_284: # %else1062
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_285
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_794
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_285: # %else1066
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_286
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_795
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_286: # %else1070
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_287
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_796
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_287: # %else1074
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_288
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_797
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_288: # %else1078
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_289
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_798
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_289: # %else1082
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_290
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_799
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_290: # %else1086
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_291
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_800
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_291: # %else1090
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_292
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_801
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_292: # %else1094
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_293
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_802
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_293: # %else1098
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_294
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_803
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_294: # %else1102
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_295
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_804
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_295: # %else1106
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_296
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_805
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_296: # %else1110
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_297
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_806
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_297: # %else1114
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_298
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_807
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_298: # %else1118
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_299
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_808
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_299: # %else1122
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_300
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_809
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_300: # %else1126
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_301
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_810
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_301: # %else1130
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_302
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_811
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_302: # %else1134
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_304
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_303: # %cond.load1137
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 286
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 285
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_304: # %else1138
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_306
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.305: # %cond.load1141
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 287
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 286
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_306: # %else1142
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_307
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_812
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_307: # %else1146
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_308
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_813
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_308: # %else1150
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_309
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_814
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_309: # %else1154
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_310
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_815
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_310: # %else1158
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_311
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_816
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_311: # %else1162
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_312
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_817
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_312: # %else1166
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_313
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_818
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_313: # %else1170
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_314
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_819
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_314: # %else1174
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_315
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_820
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_315: # %else1178
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_316
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_821
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_316: # %else1182
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_317
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_822
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_317: # %else1186
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_318
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_823
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_318: # %else1190
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_319
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_824
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_319: # %else1194
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_320
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_825
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_320: # %else1198
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_321
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_826
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_321: # %else1202
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_322
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_827
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_322: # %else1206
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_323
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_828
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_323: # %else1210
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_324
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_829
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_324: # %else1214
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_325
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_830
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_325: # %else1218
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_326
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_831
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_326: # %else1222
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_327
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_832
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_327: # %else1226
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_328
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_833
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_328: # %else1230
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_329
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_834
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_329: # %else1234
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_330
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_835
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_330: # %else1238
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_331
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_836
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_331: # %else1242
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_332
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_837
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_332: # %else1246
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_333
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_838
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_333: # %else1250
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_334
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_839
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_334: # %else1254
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_335
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_840
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_335: # %else1258
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_336
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_841
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_336: # %else1262
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_338
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_337: # %cond.load1265
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 318
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 317
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_338: # %else1266
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_340
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.339: # %cond.load1269
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 319
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 318
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_340: # %else1270
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_341
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_842
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_341: # %else1274
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_342
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_843
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_342: # %else1278
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_343
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_844
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_343: # %else1282
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_344
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_845
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_344: # %else1286
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_345
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_846
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_345: # %else1290
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_346
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_847
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_346: # %else1294
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_347
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_848
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_347: # %else1298
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_348
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_849
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_348: # %else1302
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_349
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_850
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_349: # %else1306
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_350
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_851
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_350: # %else1310
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_351
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_852
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_351: # %else1314
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_352
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_853
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_352: # %else1318
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_353
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_854
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_353: # %else1322
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_354
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_855
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_354: # %else1326
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_355
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_856
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_355: # %else1330
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_356
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_857
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_356: # %else1334
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_357
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_858
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_357: # %else1338
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_358
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_859
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_358: # %else1342
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_359
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_860
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_359: # %else1346
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_360
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_861
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_360: # %else1350
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_361
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_862
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_361: # %else1354
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_362
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_863
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_362: # %else1358
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_363
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_864
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_363: # %else1362
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_364
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_865
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_364: # %else1366
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_365
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_866
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_365: # %else1370
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_366
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_867
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_366: # %else1374
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_367
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_868
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_367: # %else1378
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_368
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_869
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_368: # %else1382
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_369
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_870
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_369: # %else1386
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_370
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_871
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_370: # %else1390
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_372
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_371: # %cond.load1393
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 350
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 349
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_372: # %else1394
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_374
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.373: # %cond.load1397
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 351
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 350
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_374: # %else1398
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_375
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_872
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_375: # %else1402
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_376
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_873
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_376: # %else1406
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_377
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_874
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_377: # %else1410
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_378
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_875
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_378: # %else1414
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_379
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_876
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_379: # %else1418
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_380
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_877
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_380: # %else1422
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_381
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_878
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_381: # %else1426
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_382
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_879
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_382: # %else1430
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_383
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_880
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_383: # %else1434
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_384
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_881
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_384: # %else1438
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_385
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_882
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_385: # %else1442
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_386
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_883
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_386: # %else1446
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_387
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_884
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_387: # %else1450
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_388
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_885
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_388: # %else1454
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_389
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_886
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_389: # %else1458
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_390
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_887
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_390: # %else1462
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_391
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_888
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_391: # %else1466
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_392
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_889
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_392: # %else1470
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_393
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_890
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_393: # %else1474
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_394
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_891
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_394: # %else1478
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_395
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_892
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_395: # %else1482
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_396
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_893
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_396: # %else1486
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_397
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_894
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_397: # %else1490
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_398
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_895
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_398: # %else1494
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_399
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_896
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_399: # %else1498
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_400
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_897
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_400: # %else1502
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_401
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_898
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_401: # %else1506
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_402
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_899
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_402: # %else1510
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_403
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_900
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_403: # %else1514
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_404
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_901
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_404: # %else1518
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_406
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_405: # %cond.load1521
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 382
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 381
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_406: # %else1522
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_408
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.407: # %cond.load1525
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 383
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 382
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_408: # %else1526
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_409
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_902
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_409: # %else1530
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_410
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_903
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_410: # %else1534
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_411
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_904
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_411: # %else1538
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_412
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_905
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_412: # %else1542
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_413
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_906
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_413: # %else1546
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_414
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_907
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_414: # %else1550
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_415
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_908
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_415: # %else1554
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_416
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_909
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_416: # %else1558
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_417
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_910
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_417: # %else1562
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_418
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_911
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_418: # %else1566
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_419
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_912
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_419: # %else1570
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_420
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_913
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_420: # %else1574
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_421
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_914
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_421: # %else1578
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_422
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_915
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_422: # %else1582
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_423
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_916
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_423: # %else1586
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_424
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_917
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_424: # %else1590
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_425
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_918
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_425: # %else1594
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_426
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_919
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_426: # %else1598
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_427
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_920
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_427: # %else1602
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_428
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_921
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_428: # %else1606
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_429
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_922
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_429: # %else1610
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_430
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_923
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_430: # %else1614
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_431
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_924
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_431: # %else1618
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_432
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_925
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_432: # %else1622
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_433
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_926
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_433: # %else1626
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_434
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_927
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_434: # %else1630
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_435
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_928
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_435: # %else1634
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_436
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_929
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_436: # %else1638
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_437
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_930
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_437: # %else1642
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_438
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_931
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_438: # %else1646
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_440
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_439: # %cond.load1649
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 414
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 413
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_440: # %else1650
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_442
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.441: # %cond.load1653
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 415
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 414
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_442: # %else1654
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_443
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_932
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_443: # %else1658
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_444
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_933
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_444: # %else1662
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_445
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_934
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_445: # %else1666
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_446
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_935
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_446: # %else1670
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_447
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_936
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_447: # %else1674
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_448
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_937
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_448: # %else1678
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_449
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_938
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_449: # %else1682
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_450
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_939
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_450: # %else1686
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_451
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_940
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_451: # %else1690
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_452
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_941
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_452: # %else1694
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_453
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_942
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_453: # %else1698
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a3, .LBB61_454
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_943
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_454: # %else1702
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_455
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_944
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_455: # %else1706
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_456
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_945
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_456: # %else1710
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_457
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_946
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_457: # %else1714
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_458
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_947
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_458: # %else1718
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_459
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_948
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_459: # %else1722
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_460
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_949
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_460: # %else1726
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_461
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_950
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_461: # %else1730
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_462
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_951
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_462: # %else1734
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_463
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_952
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_463: # %else1738
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_464
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_953
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_464: # %else1742
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_465
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_954
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_465: # %else1746
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_466
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_955
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_466: # %else1750
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_467
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_956
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_467: # %else1754
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_468
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_957
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_468: # %else1758
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_469
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_958
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_469: # %else1762
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_470
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_959
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_470: # %else1766
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_471
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_960
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_471: # %else1770
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_472
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_961
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_472: # %else1774
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_474
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_473: # %cond.load1777
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 446
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 445
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_474: # %else1778
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_476
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.475: # %cond.load1781
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 447
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 446
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_476: # %else1782
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_477
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_962
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_477: # %else1786
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_478
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_963
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_478: # %else1790
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_479
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_964
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_479: # %else1794
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_480
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_965
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_480: # %else1798
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_481
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_966
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_481: # %else1802
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_482
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_967
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_482: # %else1806
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_483
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_968
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_483: # %else1810
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_484
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_969
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_484: # %else1814
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_485
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_970
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_485: # %else1818
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_486
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_971
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_486: # %else1822
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_487
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_972
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_487: # %else1826
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_488
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_973
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_488: # %else1830
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_489
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_974
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_489: # %else1834
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_490
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_975
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_490: # %else1838
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_491
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_976
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_491: # %else1842
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_492
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_977
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_492: # %else1846
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_493
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_978
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_493: # %else1850
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_494
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_979
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_494: # %else1854
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_495
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_980
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_495: # %else1858
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_496
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_981
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_496: # %else1862
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_497
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_982
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_497: # %else1866
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_498
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_983
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_498: # %else1870
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_499
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_984
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_499: # %else1874
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_500
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_985
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_500: # %else1878
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_501
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_986
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_501: # %else1882
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_502
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_987
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_502: # %else1886
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_503
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_988
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_503: # %else1890
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_504
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_989
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_504: # %else1894
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_505
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_990
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_505: # %else1898
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_506
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_991
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_506: # %else1902
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_508
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_507: # %cond.load1905
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 478
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 477
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_508: # %else1906
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_510
-; CHECK-VRGATHER-RV32-NEXT:  # %bb.509: # %cond.load1909
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 479
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 478
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_510: # %else1910
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_511
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_992
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_511: # %else1914
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_512
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_993
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_512: # %else1918
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_513
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_994
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_513: # %else1922
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_514
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_995
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_514: # %else1926
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_515
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_996
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_515: # %else1930
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_516
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_997
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_516: # %else1934
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_517
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_998
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_517: # %else1938
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_518
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_999
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_518: # %else1942
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_519
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1000
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_519: # %else1946
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_520
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1001
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_520: # %else1950
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_521
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1002
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_521: # %else1954
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV32-NEXT:    beqz a2, .LBB61_522
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1003
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_522: # %else1958
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_523
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1004
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_523: # %else1962
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_524
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1005
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_524: # %else1966
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_525
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1006
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_525: # %else1970
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_526
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1007
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_526: # %else1974
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_527
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1008
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_527: # %else1978
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_528
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1009
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_528: # %else1982
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_529
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1010
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_529: # %else1986
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_530
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1011
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_530: # %else1990
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_531
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1012
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_531: # %else1994
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_532
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1013
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_532: # %else1998
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_533
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1014
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_533: # %else2002
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_534
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1015
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_534: # %else2006
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_535
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1016
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_535: # %else2010
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_536
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1017
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_536: # %else2014
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_537
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1018
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_537: # %else2018
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_538
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1019
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_538: # %else2022
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_539
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1020
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_539: # %else2026
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_540
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1021
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_540: # %else2030
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_541
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1022
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_541: # %else2034
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_542
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1023
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_542: # %else2038
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_543
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_1024
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_543: # %else2042
-; CHECK-VRGATHER-RV32-NEXT:    ret
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_544: # %cond.load
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v8, a1
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_545
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_2
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_545: # %cond.load1
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_546
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_3
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_546: # %cond.load5
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_547
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_4
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_547: # %cond.load9
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_548
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_5
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_548: # %cond.load13
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_549
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_6
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_549: # %cond.load17
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_550
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_7
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_550: # %cond.load21
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_551
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_8
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_551: # %cond.load25
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_552
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_9
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_552: # %cond.load29
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_553
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_10
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_553: # %cond.load33
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a1, .LBB61_554
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_11
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_554: # %cond.load37
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_555
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_12
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_555: # %cond.load41
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_556
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_13
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_556: # %cond.load45
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_557
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_14
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_557: # %cond.load49
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_558
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_15
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_558: # %cond.load53
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_559
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_16
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_559: # %cond.load57
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_560
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_17
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_560: # %cond.load61
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_561
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_18
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_561: # %cond.load65
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_562
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_19
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_562: # %cond.load69
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_563
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_20
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_563: # %cond.load73
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_564
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_21
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_564: # %cond.load77
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_565
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_22
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_565: # %cond.load81
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_566
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_23
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_566: # %cond.load85
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_567
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_24
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_567: # %cond.load89
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_568
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_25
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_568: # %cond.load93
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_569
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_26
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_569: # %cond.load97
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_570
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_27
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_570: # %cond.load101
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_571
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_28
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_571: # %cond.load105
-; CHECK-VRGATHER-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a1, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bgez a1, .LBB61_1025
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_29
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1025: # %cond.load105
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_30
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_572: # %cond.load121
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_573
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_36
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_573: # %cond.load125
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 33
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 32
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_574
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_37
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_574: # %cond.load129
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 34
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 33
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_575
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_38
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_575: # %cond.load133
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 35
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 34
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_576
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_39
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_576: # %cond.load137
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 36
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 35
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_577
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_40
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_577: # %cond.load141
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 37
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 36
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_578
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_41
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_578: # %cond.load145
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 38
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 37
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_579
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_42
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_579: # %cond.load149
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 39
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 38
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_580
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_43
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_580: # %cond.load153
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 40
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 39
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_581
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_44
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_581: # %cond.load157
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 41
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 40
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_582
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_45
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_582: # %cond.load161
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 42
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 41
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_583
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_46
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_583: # %cond.load165
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 43
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 42
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_584
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_47
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_584: # %cond.load169
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 44
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 43
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_585
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_48
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_585: # %cond.load173
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 45
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 44
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_586
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_49
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_586: # %cond.load177
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 46
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 45
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_587
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_50
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_587: # %cond.load181
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 47
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 46
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_588
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_51
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_588: # %cond.load185
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 48
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 47
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_589
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_52
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_589: # %cond.load189
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 49
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 48
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_590
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_53
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_590: # %cond.load193
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 50
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 49
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_591
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_54
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_591: # %cond.load197
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 51
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 50
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_592
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_55
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_592: # %cond.load201
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 52
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 51
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_593
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_56
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_593: # %cond.load205
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 53
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 52
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_594
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_57
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_594: # %cond.load209
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 54
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 53
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_595
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_58
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_595: # %cond.load213
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 55
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 54
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_596
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_59
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_596: # %cond.load217
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 56
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 55
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_597
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_60
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_597: # %cond.load221
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 57
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 56
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_598
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_61
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_598: # %cond.load225
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 58
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 57
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_599
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_62
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_599: # %cond.load229
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 59
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 58
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_600
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_63
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_600: # %cond.load233
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 60
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 59
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_601
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_64
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_601: # %cond.load237
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 61
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 60
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1026
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_65
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1026: # %cond.load237
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_66
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_602: # %cond.load249
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 63
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_603
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_70
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_603: # %cond.load253
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 65
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 64
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_604
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_71
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_604: # %cond.load257
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 66
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 65
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_605
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_72
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_605: # %cond.load261
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 67
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 66
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_606
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_73
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_606: # %cond.load265
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 68
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 67
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_607
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_74
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_607: # %cond.load269
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 69
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 68
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_608
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_75
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_608: # %cond.load273
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 70
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 69
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_609
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_76
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_609: # %cond.load277
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 71
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 70
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_610
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_77
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_610: # %cond.load281
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 72
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 71
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_611
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_78
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_611: # %cond.load285
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 73
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 72
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_612
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_79
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_612: # %cond.load289
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 74
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 73
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_613
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_80
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_613: # %cond.load293
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 75
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 74
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_614
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_81
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_614: # %cond.load297
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 76
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 75
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_615
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_82
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_615: # %cond.load301
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 77
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 76
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_616
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_83
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_616: # %cond.load305
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 78
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 77
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_617
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_84
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_617: # %cond.load309
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 79
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 78
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_618
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_85
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_618: # %cond.load313
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 80
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 79
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_619
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_86
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_619: # %cond.load317
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 81
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 80
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_620
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_87
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_620: # %cond.load321
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 82
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 81
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_621
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_88
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_621: # %cond.load325
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 83
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 82
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_622
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_89
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_622: # %cond.load329
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 84
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 83
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_623
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_90
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_623: # %cond.load333
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 85
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 84
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_624
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_91
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_624: # %cond.load337
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 86
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 85
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_625
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_92
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_625: # %cond.load341
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 87
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 86
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_626
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_93
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_626: # %cond.load345
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 88
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 87
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_627
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_94
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_627: # %cond.load349
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 89
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 88
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_628
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_95
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_628: # %cond.load353
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 90
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 89
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_629
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_96
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_629: # %cond.load357
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 91
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 90
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_630
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_97
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_630: # %cond.load361
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 92
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 91
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_631
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_98
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_631: # %cond.load365
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 93
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 92
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1027
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_99
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1027: # %cond.load365
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_100
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_632: # %cond.load377
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 96
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 95
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_633
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_104
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_633: # %cond.load381
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 97
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 96
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_634
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_105
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_634: # %cond.load385
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 98
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 97
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_635
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_106
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_635: # %cond.load389
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 99
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 98
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_636
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_107
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_636: # %cond.load393
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 100
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 99
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_637
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_108
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_637: # %cond.load397
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 101
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 100
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_638
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_109
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_638: # %cond.load401
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 102
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 101
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_639
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_110
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_639: # %cond.load405
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 103
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 102
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_640
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_111
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_640: # %cond.load409
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 104
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 103
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_641
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_112
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_641: # %cond.load413
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 105
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 104
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_642
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_113
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_642: # %cond.load417
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 106
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 105
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_643
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_114
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_643: # %cond.load421
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 107
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 106
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_644
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_115
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_644: # %cond.load425
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 108
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 107
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_645
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_116
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_645: # %cond.load429
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 109
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 108
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_646
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_117
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_646: # %cond.load433
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 110
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 109
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_647
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_118
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_647: # %cond.load437
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 111
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 110
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_648
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_119
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_648: # %cond.load441
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 112
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 111
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_649
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_120
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_649: # %cond.load445
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 113
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 112
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_650
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_121
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_650: # %cond.load449
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 114
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 113
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_651
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_122
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_651: # %cond.load453
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 115
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 114
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_652
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_123
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_652: # %cond.load457
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 116
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 115
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_653
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_124
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_653: # %cond.load461
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 117
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 116
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_654
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_125
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_654: # %cond.load465
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 118
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 117
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_655
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_126
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_655: # %cond.load469
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 119
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 118
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_656
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_127
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_656: # %cond.load473
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 120
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 119
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_657
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_128
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_657: # %cond.load477
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 121
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 120
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_658
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_129
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_658: # %cond.load481
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 122
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 121
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_659
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_130
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_659: # %cond.load485
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 123
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 122
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_660
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_131
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_660: # %cond.load489
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 124
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 123
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_661
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_132
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_661: # %cond.load493
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 125
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 124
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1028
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_133
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1028: # %cond.load493
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_134
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_662: # %cond.load505
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 127
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_663
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_138
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_663: # %cond.load509
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 129
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 128
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_664
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_139
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_664: # %cond.load513
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 130
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 129
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_665
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_140
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_665: # %cond.load517
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 131
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 130
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_666
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_141
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_666: # %cond.load521
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 132
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 131
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_667
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_142
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_667: # %cond.load525
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 133
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 132
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_668
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_143
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_668: # %cond.load529
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 134
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 133
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_669
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_144
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_669: # %cond.load533
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 135
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 134
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_670
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_145
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_670: # %cond.load537
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 136
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 135
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_671
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_146
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_671: # %cond.load541
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 137
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 136
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_672
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_147
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_672: # %cond.load545
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 138
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 137
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_673
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_148
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_673: # %cond.load549
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 139
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 138
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_674
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_149
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_674: # %cond.load553
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 140
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 139
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_675
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_150
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_675: # %cond.load557
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 141
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 140
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_676
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_151
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_676: # %cond.load561
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 142
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 141
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_677
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_152
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_677: # %cond.load565
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 143
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 142
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_678
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_153
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_678: # %cond.load569
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 144
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 143
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_679
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_154
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_679: # %cond.load573
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 145
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 144
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_680
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_155
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_680: # %cond.load577
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 146
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 145
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_681
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_156
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_681: # %cond.load581
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 147
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 146
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_682
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_157
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_682: # %cond.load585
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 148
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 147
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_683
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_158
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_683: # %cond.load589
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 149
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 148
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_684
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_159
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_684: # %cond.load593
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 150
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 149
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_685
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_160
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_685: # %cond.load597
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 151
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 150
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_686
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_161
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_686: # %cond.load601
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 152
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 151
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_687
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_162
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_687: # %cond.load605
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 153
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 152
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_688
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_163
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_688: # %cond.load609
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 154
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 153
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_689
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_164
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_689: # %cond.load613
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 155
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 154
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_690
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_165
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_690: # %cond.load617
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 156
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 155
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_691
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_166
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_691: # %cond.load621
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 157
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 156
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1029
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_167
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1029: # %cond.load621
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_168
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_692: # %cond.load633
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 160
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 159
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_693
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_172
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_693: # %cond.load637
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 161
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 160
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_694
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_173
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_694: # %cond.load641
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 162
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 161
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_695
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_174
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_695: # %cond.load645
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 163
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 162
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_696
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_175
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_696: # %cond.load649
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 164
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 163
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_697
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_176
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_697: # %cond.load653
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 165
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 164
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_698
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_177
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_698: # %cond.load657
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 166
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 165
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_699
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_178
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_699: # %cond.load661
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 167
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 166
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_700
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_179
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_700: # %cond.load665
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 168
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 167
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_701
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_180
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_701: # %cond.load669
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 169
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 168
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_702
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_181
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_702: # %cond.load673
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 170
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 169
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_703
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_182
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_703: # %cond.load677
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 171
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 170
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_704
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_183
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_704: # %cond.load681
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 172
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 171
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_705
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_184
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_705: # %cond.load685
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 173
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 172
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_706
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_185
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_706: # %cond.load689
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 174
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 173
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_707
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_186
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_707: # %cond.load693
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 175
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 174
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_708
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_187
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_708: # %cond.load697
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 176
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 175
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_709
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_188
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_709: # %cond.load701
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 177
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 176
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_710
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_189
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_710: # %cond.load705
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 178
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 177
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_711
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_190
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_711: # %cond.load709
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 179
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 178
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_712
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_191
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_712: # %cond.load713
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 180
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 179
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_713
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_192
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_713: # %cond.load717
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 181
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 180
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_714
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_193
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_714: # %cond.load721
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 182
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 181
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_715
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_194
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_715: # %cond.load725
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 183
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 182
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_716
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_195
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_716: # %cond.load729
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 184
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 183
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_717
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_196
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_717: # %cond.load733
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 185
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 184
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_718
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_197
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_718: # %cond.load737
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 186
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 185
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_719
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_198
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_719: # %cond.load741
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 187
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 186
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_720
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_199
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_720: # %cond.load745
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 188
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 187
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_721
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_200
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_721: # %cond.load749
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 189
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 188
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1030
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_201
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1030: # %cond.load749
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_202
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_722: # %cond.load761
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 192
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 191
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_723
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_206
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_723: # %cond.load765
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 193
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 192
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_724
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_207
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_724: # %cond.load769
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 194
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 193
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_725
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_208
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_725: # %cond.load773
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 195
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 194
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_726
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_209
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_726: # %cond.load777
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 196
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 195
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_727
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_210
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_727: # %cond.load781
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 197
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 196
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_728
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_211
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_728: # %cond.load785
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 198
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 197
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_729
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_212
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_729: # %cond.load789
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 199
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 198
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_730
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_213
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_730: # %cond.load793
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 200
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 199
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_731
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_214
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_731: # %cond.load797
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 201
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 200
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_732
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_215
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_732: # %cond.load801
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 202
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 201
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_733
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_216
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_733: # %cond.load805
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 203
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 202
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_734
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_217
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_734: # %cond.load809
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 204
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 203
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_735
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_218
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_735: # %cond.load813
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 205
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 204
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_736
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_219
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_736: # %cond.load817
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 206
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 205
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_737
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_220
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_737: # %cond.load821
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 207
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 206
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_738
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_221
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_738: # %cond.load825
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 208
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 207
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_739
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_222
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_739: # %cond.load829
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 209
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 208
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_740
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_223
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_740: # %cond.load833
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 210
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 209
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_741
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_224
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_741: # %cond.load837
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 211
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 210
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_742
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_225
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_742: # %cond.load841
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 212
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 211
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_743
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_226
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_743: # %cond.load845
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 213
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 212
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_744
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_227
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_744: # %cond.load849
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 214
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 213
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_745
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_228
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_745: # %cond.load853
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 215
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 214
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_746
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_229
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_746: # %cond.load857
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 216
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 215
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_747
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_230
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_747: # %cond.load861
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 217
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 216
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_748
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_231
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_748: # %cond.load865
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 218
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 217
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_749
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_232
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_749: # %cond.load869
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 219
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 218
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_750
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_233
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_750: # %cond.load873
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 220
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 219
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_751
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_234
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_751: # %cond.load877
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 221
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 220
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1031
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_235
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1031: # %cond.load877
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_236
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_752: # %cond.load889
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 224
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 223
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_753
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_240
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_753: # %cond.load893
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 225
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 224
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_754
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_241
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_754: # %cond.load897
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 226
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 225
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_755
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_242
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_755: # %cond.load901
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 227
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 226
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_756
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_243
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_756: # %cond.load905
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 228
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 227
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_757
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_244
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_757: # %cond.load909
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 229
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 228
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_758
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_245
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_758: # %cond.load913
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 230
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 229
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_759
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_246
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_759: # %cond.load917
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 231
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 230
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_760
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_247
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_760: # %cond.load921
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 232
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 231
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_761
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_248
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_761: # %cond.load925
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 233
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 232
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_762
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_249
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_762: # %cond.load929
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 234
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 233
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_763
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_250
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_763: # %cond.load933
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 235
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 234
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_764
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_251
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_764: # %cond.load937
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 236
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 235
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_765
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_252
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_765: # %cond.load941
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 237
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 236
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_766
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_253
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_766: # %cond.load945
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 238
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 237
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_767
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_254
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_767: # %cond.load949
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 239
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 238
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_768
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_255
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_768: # %cond.load953
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 240
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 239
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_769
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_256
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_769: # %cond.load957
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 241
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 240
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_770
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_257
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_770: # %cond.load961
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 242
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 241
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_771
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_258
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_771: # %cond.load965
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 243
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 242
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_772
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_259
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_772: # %cond.load969
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 244
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 243
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_773
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_260
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_773: # %cond.load973
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 245
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 244
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_774
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_261
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_774: # %cond.load977
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 246
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 245
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_775
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_262
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_775: # %cond.load981
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 247
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 246
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_776
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_263
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_776: # %cond.load985
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 248
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 247
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_777
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_264
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_777: # %cond.load989
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 249
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 248
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_778
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_265
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_778: # %cond.load993
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 250
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 249
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_779
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_266
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_779: # %cond.load997
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 251
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 250
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_780
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_267
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_780: # %cond.load1001
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 252
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 251
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_781
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_268
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_781: # %cond.load1005
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 253
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 252
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1032
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_269
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1032: # %cond.load1005
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_270
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_782: # %cond.load1017
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 255
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_783
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_274
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_783: # %cond.load1021
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 257
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 256
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_784
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_275
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_784: # %cond.load1025
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 258
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 257
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_785
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_276
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_785: # %cond.load1029
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 259
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 258
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_786
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_277
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_786: # %cond.load1033
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 260
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 259
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_787
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_278
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_787: # %cond.load1037
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 261
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 260
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_788
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_279
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_788: # %cond.load1041
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 262
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 261
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_789
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_280
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_789: # %cond.load1045
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 263
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 262
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_790
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_281
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_790: # %cond.load1049
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 264
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 263
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_791
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_282
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_791: # %cond.load1053
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 265
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 264
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_792
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_283
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_792: # %cond.load1057
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 266
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 265
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_793
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_284
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_793: # %cond.load1061
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 267
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 266
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_794
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_285
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_794: # %cond.load1065
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 268
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 267
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_795
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_286
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_795: # %cond.load1069
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 269
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 268
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_796
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_287
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_796: # %cond.load1073
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 270
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 269
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_797
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_288
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_797: # %cond.load1077
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 271
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 270
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_798
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_289
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_798: # %cond.load1081
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 272
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 271
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_799
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_290
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_799: # %cond.load1085
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 273
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 272
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_800
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_291
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_800: # %cond.load1089
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 274
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 273
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_801
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_292
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_801: # %cond.load1093
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 275
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 274
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_802
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_293
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_802: # %cond.load1097
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 276
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 275
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_803
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_294
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_803: # %cond.load1101
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 277
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 276
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_804
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_295
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_804: # %cond.load1105
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 278
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 277
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_805
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_296
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_805: # %cond.load1109
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 279
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 278
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_806
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_297
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_806: # %cond.load1113
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 280
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 279
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_807
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_298
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_807: # %cond.load1117
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 281
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 280
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_808
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_299
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_808: # %cond.load1121
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 282
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 281
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_809
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_300
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_809: # %cond.load1125
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 283
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 282
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_810
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_301
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_810: # %cond.load1129
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 284
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 283
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_811
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_302
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_811: # %cond.load1133
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 285
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 284
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1033
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_303
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1033: # %cond.load1133
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_304
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_812: # %cond.load1145
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 288
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 287
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_813
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_308
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_813: # %cond.load1149
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 289
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 288
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_814
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_309
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_814: # %cond.load1153
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 290
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 289
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_815
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_310
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_815: # %cond.load1157
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 291
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 290
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_816
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_311
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_816: # %cond.load1161
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 292
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 291
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_817
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_312
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_817: # %cond.load1165
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 293
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 292
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_818
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_313
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_818: # %cond.load1169
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 294
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 293
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_819
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_314
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_819: # %cond.load1173
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 295
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 294
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_820
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_315
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_820: # %cond.load1177
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 296
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 295
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_821
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_316
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_821: # %cond.load1181
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 297
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 296
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_822
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_317
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_822: # %cond.load1185
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 298
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 297
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_823
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_318
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_823: # %cond.load1189
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 299
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 298
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_824
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_319
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_824: # %cond.load1193
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 300
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 299
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_825
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_320
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_825: # %cond.load1197
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 301
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 300
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_826
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_321
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_826: # %cond.load1201
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 302
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 301
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_827
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_322
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_827: # %cond.load1205
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 303
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 302
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_828
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_323
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_828: # %cond.load1209
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 304
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 303
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_829
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_324
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_829: # %cond.load1213
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 305
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 304
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_830
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_325
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_830: # %cond.load1217
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 306
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 305
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_831
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_326
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_831: # %cond.load1221
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 307
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 306
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_832
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_327
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_832: # %cond.load1225
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 308
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 307
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_833
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_328
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_833: # %cond.load1229
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 309
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 308
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_834
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_329
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_834: # %cond.load1233
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 310
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 309
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_835
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_330
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_835: # %cond.load1237
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 311
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 310
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_836
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_331
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_836: # %cond.load1241
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 312
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 311
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_837
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_332
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_837: # %cond.load1245
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 313
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 312
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_838
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_333
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_838: # %cond.load1249
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 314
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 313
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_839
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_334
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_839: # %cond.load1253
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 315
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 314
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_840
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_335
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_840: # %cond.load1257
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 316
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 315
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_841
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_336
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_841: # %cond.load1261
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 317
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 316
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1034
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_337
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1034: # %cond.load1261
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_338
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_842: # %cond.load1273
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 320
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 319
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_843
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_342
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_843: # %cond.load1277
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 321
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 320
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_844
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_343
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_844: # %cond.load1281
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 322
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 321
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_845
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_344
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_845: # %cond.load1285
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 323
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 322
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_846
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_345
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_846: # %cond.load1289
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 324
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 323
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_847
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_346
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_847: # %cond.load1293
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 325
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 324
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_848
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_347
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_848: # %cond.load1297
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 326
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 325
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_849
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_348
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_849: # %cond.load1301
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 327
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 326
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_850
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_349
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_850: # %cond.load1305
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 328
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 327
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_851
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_350
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_851: # %cond.load1309
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 329
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 328
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_852
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_351
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_852: # %cond.load1313
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 330
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 329
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_853
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_352
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_853: # %cond.load1317
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 331
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 330
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_854
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_353
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_854: # %cond.load1321
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 332
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 331
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_855
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_354
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_855: # %cond.load1325
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 333
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 332
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_856
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_355
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_856: # %cond.load1329
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 334
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 333
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_857
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_356
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_857: # %cond.load1333
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 335
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 334
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_858
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_357
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_858: # %cond.load1337
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 336
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 335
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_859
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_358
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_859: # %cond.load1341
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 337
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 336
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_860
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_359
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_860: # %cond.load1345
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 338
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 337
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_861
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_360
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_861: # %cond.load1349
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 339
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 338
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_862
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_361
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_862: # %cond.load1353
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 340
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 339
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_863
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_362
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_863: # %cond.load1357
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 341
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 340
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_864
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_363
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_864: # %cond.load1361
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 342
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 341
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_865
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_364
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_865: # %cond.load1365
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 343
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 342
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_866
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_365
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_866: # %cond.load1369
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 344
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 343
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_867
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_366
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_867: # %cond.load1373
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 345
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 344
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_868
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_367
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_868: # %cond.load1377
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 346
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 345
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_869
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_368
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_869: # %cond.load1381
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 347
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 346
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_870
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_369
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_870: # %cond.load1385
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 348
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 347
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_871
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_370
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_871: # %cond.load1389
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 349
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 348
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1035
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_371
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1035: # %cond.load1389
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_372
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_872: # %cond.load1401
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 352
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 351
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_873
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_376
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_873: # %cond.load1405
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 353
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 352
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_874
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_377
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_874: # %cond.load1409
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 354
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 353
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_875
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_378
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_875: # %cond.load1413
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 355
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 354
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_876
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_379
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_876: # %cond.load1417
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 356
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 355
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_877
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_380
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_877: # %cond.load1421
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 357
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 356
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_878
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_381
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_878: # %cond.load1425
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 358
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 357
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_879
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_382
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_879: # %cond.load1429
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 359
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 358
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_880
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_383
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_880: # %cond.load1433
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 360
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 359
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_881
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_384
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_881: # %cond.load1437
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 361
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 360
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_882
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_385
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_882: # %cond.load1441
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 362
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 361
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_883
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_386
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_883: # %cond.load1445
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 363
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 362
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_884
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_387
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_884: # %cond.load1449
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 364
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 363
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_885
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_388
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_885: # %cond.load1453
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 365
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 364
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_886
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_389
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_886: # %cond.load1457
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 366
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 365
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_887
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_390
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_887: # %cond.load1461
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 367
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 366
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_888
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_391
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_888: # %cond.load1465
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 368
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 367
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_889
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_392
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_889: # %cond.load1469
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 369
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 368
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_890
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_393
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_890: # %cond.load1473
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 370
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 369
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_891
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_394
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_891: # %cond.load1477
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 371
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 370
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_892
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_395
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_892: # %cond.load1481
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 372
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 371
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_893
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_396
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_893: # %cond.load1485
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 373
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 372
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_894
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_397
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_894: # %cond.load1489
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 374
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 373
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_895
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_398
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_895: # %cond.load1493
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 375
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 374
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_896
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_399
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_896: # %cond.load1497
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 376
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 375
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_897
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_400
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_897: # %cond.load1501
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 377
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 376
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_898
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_401
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_898: # %cond.load1505
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 378
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 377
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_899
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_402
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_899: # %cond.load1509
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 379
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 378
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_900
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_403
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_900: # %cond.load1513
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 380
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 379
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_901
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_404
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_901: # %cond.load1517
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 381
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 380
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1036
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_405
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1036: # %cond.load1517
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_406
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_902: # %cond.load1529
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 384
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 383
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_903
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_410
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_903: # %cond.load1533
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 385
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 384
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_904
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_411
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_904: # %cond.load1537
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 386
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 385
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_905
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_412
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_905: # %cond.load1541
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 387
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 386
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_906
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_413
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_906: # %cond.load1545
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 388
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 387
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_907
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_414
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_907: # %cond.load1549
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 389
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 388
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_908
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_415
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_908: # %cond.load1553
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 390
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 389
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_909
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_416
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_909: # %cond.load1557
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 391
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 390
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_910
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_417
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_910: # %cond.load1561
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 392
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 391
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_911
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_418
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_911: # %cond.load1565
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 393
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 392
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_912
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_419
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_912: # %cond.load1569
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 394
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 393
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_913
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_420
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_913: # %cond.load1573
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 395
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 394
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_914
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_421
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_914: # %cond.load1577
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 396
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 395
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_915
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_422
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_915: # %cond.load1581
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 397
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 396
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_916
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_423
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_916: # %cond.load1585
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 398
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 397
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_917
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_424
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_917: # %cond.load1589
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 399
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 398
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_918
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_425
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_918: # %cond.load1593
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 400
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 399
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_919
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_426
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_919: # %cond.load1597
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 401
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 400
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_920
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_427
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_920: # %cond.load1601
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 402
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 401
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_921
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_428
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_921: # %cond.load1605
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 403
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 402
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_922
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_429
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_922: # %cond.load1609
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 404
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 403
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_923
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_430
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_923: # %cond.load1613
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 405
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 404
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_924
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_431
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_924: # %cond.load1617
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 406
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 405
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_925
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_432
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_925: # %cond.load1621
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 407
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 406
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_926
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_433
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_926: # %cond.load1625
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 408
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 407
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_927
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_434
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_927: # %cond.load1629
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 409
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 408
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_928
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_435
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_928: # %cond.load1633
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 410
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 409
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_929
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_436
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_929: # %cond.load1637
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 411
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 410
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_930
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_437
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_930: # %cond.load1641
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 412
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 411
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_931
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_438
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_931: # %cond.load1645
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 413
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 412
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1037
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_439
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1037: # %cond.load1645
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_440
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_932: # %cond.load1657
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 416
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 415
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_933
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_444
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_933: # %cond.load1661
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 417
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 416
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_934
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_445
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_934: # %cond.load1665
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 418
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 417
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_935
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_446
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_935: # %cond.load1669
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 419
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 418
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_936
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_447
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_936: # %cond.load1673
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 420
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 419
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_937
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_448
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_937: # %cond.load1677
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 421
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 420
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_938
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_449
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_938: # %cond.load1681
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 422
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 421
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_939
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_450
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_939: # %cond.load1685
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 423
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 422
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_940
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_451
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_940: # %cond.load1689
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 424
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 423
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_941
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_452
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_941: # %cond.load1693
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 425
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 424
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_942
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_453
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_942: # %cond.load1697
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 426
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 425
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a3, .LBB61_943
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_454
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_943: # %cond.load1701
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 427
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 426
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_944
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_455
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_944: # %cond.load1705
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 428
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 427
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_945
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_456
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_945: # %cond.load1709
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 429
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 428
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_946
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_457
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_946: # %cond.load1713
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 430
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 429
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_947
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_458
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_947: # %cond.load1717
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 431
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 430
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_948
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_459
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_948: # %cond.load1721
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 432
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 431
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_949
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_460
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_949: # %cond.load1725
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 433
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 432
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_950
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_461
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_950: # %cond.load1729
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 434
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 433
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_951
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_462
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_951: # %cond.load1733
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 435
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 434
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_952
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_463
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_952: # %cond.load1737
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 436
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 435
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_953
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_464
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_953: # %cond.load1741
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 437
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 436
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_954
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_465
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_954: # %cond.load1745
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 438
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 437
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_955
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_466
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_955: # %cond.load1749
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 439
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 438
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_956
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_467
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_956: # %cond.load1753
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 440
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 439
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_957
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_468
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_957: # %cond.load1757
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 441
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 440
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_958
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_469
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_958: # %cond.load1761
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 442
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 441
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_959
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_470
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_959: # %cond.load1765
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 443
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 442
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_960
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_471
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_960: # %cond.load1769
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 444
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 443
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a3, .LBB61_961
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_472
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_961: # %cond.load1773
-; CHECK-VRGATHER-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 445
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 444
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a3, a2, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a3, .LBB61_1038
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_473
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1038: # %cond.load1773
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_474
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_962: # %cond.load1785
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 448
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 447
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_963
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_478
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_963: # %cond.load1789
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 449
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 448
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_964
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_479
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_964: # %cond.load1793
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 450
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 449
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_965
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_480
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_965: # %cond.load1797
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 451
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 450
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_966
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_481
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_966: # %cond.load1801
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 452
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 451
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_967
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_482
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_967: # %cond.load1805
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 453
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 452
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_968
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_483
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_968: # %cond.load1809
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 454
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 453
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_969
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_484
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_969: # %cond.load1813
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 455
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 454
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_970
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_485
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_970: # %cond.load1817
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 456
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 455
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_971
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_486
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_971: # %cond.load1821
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 457
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 456
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_972
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_487
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_972: # %cond.load1825
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 458
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 457
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_973
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_488
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_973: # %cond.load1829
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 459
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 458
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_974
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_489
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_974: # %cond.load1833
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 460
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 459
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_975
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_490
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_975: # %cond.load1837
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 461
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 460
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_976
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_491
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_976: # %cond.load1841
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 462
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 461
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_977
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_492
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_977: # %cond.load1845
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 463
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 462
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_978
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_493
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_978: # %cond.load1849
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 464
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 463
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_979
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_494
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_979: # %cond.load1853
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 465
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 464
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_980
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_495
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_980: # %cond.load1857
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 466
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 465
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_981
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_496
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_981: # %cond.load1861
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 467
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 466
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_982
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_497
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_982: # %cond.load1865
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 468
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 467
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_983
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_498
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_983: # %cond.load1869
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 469
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 468
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_984
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_499
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_984: # %cond.load1873
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 470
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 469
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_985
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_500
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_985: # %cond.load1877
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 471
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 470
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_986
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_501
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_986: # %cond.load1881
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 472
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 471
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_987
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_502
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_987: # %cond.load1885
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 473
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 472
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_988
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_503
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_988: # %cond.load1889
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 474
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 473
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_989
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_504
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_989: # %cond.load1893
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 475
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 474
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_990
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_505
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_990: # %cond.load1897
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 476
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 475
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_991
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_506
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_991: # %cond.load1901
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 477
-; CHECK-VRGATHER-RV32-NEXT:    li a4, 476
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a3, 2
-; CHECK-VRGATHER-RV32-NEXT:    bgez a2, .LBB61_1039
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_507
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1039: # %cond.load1901
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_508
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 480
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 479
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_993
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_512
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 481
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 480
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_994
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_513
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 482
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 481
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_995
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_514
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 483
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 482
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_996
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_515
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 484
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 483
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_997
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_516
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 485
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 484
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_998
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_517
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 486
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 485
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_999
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_518
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 487
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 486
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1000
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_519
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 488
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 487
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1001
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_520
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 489
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 488
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1002
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_521
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 490
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 489
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV32-NEXT:    bnez a2, .LBB61_1003
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_522
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 491
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 490
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1004
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_523
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 492
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 491
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1005
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_524
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 493
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 492
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1006
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_525
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 494
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 493
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1007
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_526
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 495
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 494
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1008
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_527
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 496
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 495
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1009
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_528
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 497
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 496
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1010
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_529
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 498
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 497
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1011
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_530
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 499
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 498
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1012
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_531
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 500
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 499
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1013
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_532
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 501
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 500
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1014
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_533
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 502
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 501
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1015
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_534
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 503
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 502
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1016
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_535
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 504
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 503
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1017
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_536
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 505
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 504
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1018
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_537
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 506
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 505
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1019
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_538
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 507
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 506
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1020
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_539
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 508
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 507
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1021
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_540
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 509
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 508
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1022
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_541
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 510
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 509
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV32-NEXT:    bltz a2, .LBB61_1023
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_542
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-VRGATHER-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV32-NEXT:    li a2, 511
-; CHECK-VRGATHER-RV32-NEXT:    li a3, 510
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV32-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV32-NEXT:    bltz a1, .LBB61_1024
-; CHECK-VRGATHER-RV32-NEXT:    j .LBB61_543
-; CHECK-VRGATHER-RV32-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-VRGATHER-RV32-NEXT:    lbu a0, 0(a0)
-; CHECK-VRGATHER-RV32-NEXT:    li a1, 512
-; CHECK-VRGATHER-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV32-NEXT:    vmv.s.x v16, a0
-; CHECK-VRGATHER-RV32-NEXT:    li a0, 511
-; CHECK-VRGATHER-RV32-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-VRGATHER-RV32-NEXT:    ret
-;
-; CHECK-VRGATHER-RV64-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-VRGATHER-RV64:       # %bb.0:
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v0
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_1
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_527
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1: # %else
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_2
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_528
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_2: # %else2
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_3
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_529
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_3: # %else6
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_4
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_530
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_4: # %else10
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_5
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_531
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_5: # %else14
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_6
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_532
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_6: # %else18
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_7
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_533
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_7: # %else22
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_8
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_534
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_8: # %else26
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_9
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_535
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_9: # %else30
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_10
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_536
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_10: # %else34
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_11
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_537
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_11: # %else38
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_12
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_538
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_12: # %else42
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_13
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_539
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_13: # %else46
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_14
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_540
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_14: # %else50
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_15
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_541
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_15: # %else54
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_16
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_542
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_16: # %else58
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_17
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_543
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_17: # %else62
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_18
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_544
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_18: # %else66
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_19
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_545
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_19: # %else70
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_20
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_546
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_20: # %else74
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_21
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_547
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_21: # %else78
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_22
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_548
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_22: # %else82
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_23
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_549
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_23: # %else86
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_24
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_550
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_24: # %else90
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_25
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_551
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_25: # %else94
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_26
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_552
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_26: # %else98
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_27
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_553
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_27: # %else102
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_28
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_554
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_28: # %else106
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_29
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_555
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_29: # %else110
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_30
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_556
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_30: # %else114
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_31
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_557
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_31: # %else118
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_32
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_558
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_32: # %else122
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_33
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_559
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_33: # %else126
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_34
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_560
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_34: # %else130
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_35
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_561
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_35: # %else134
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_36
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_562
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_36: # %else138
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_37
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_563
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_37: # %else142
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_38
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_564
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_38: # %else146
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_39
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_565
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_39: # %else150
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_40
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_566
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_40: # %else154
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_41
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_567
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_41: # %else158
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_42
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_568
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_42: # %else162
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_43
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_569
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_43: # %else166
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_44
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_570
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_44: # %else170
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_45
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_571
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_45: # %else174
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_46
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_572
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_46: # %else178
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_47
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_573
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_47: # %else182
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_48
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_574
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_48: # %else186
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_49
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_575
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_49: # %else190
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_50
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_576
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_50: # %else194
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_51
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_577
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_51: # %else198
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_52
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_578
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_52: # %else202
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_53
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_579
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_53: # %else206
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_54
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_580
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_54: # %else210
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_55
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_581
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_55: # %else214
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_56
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_582
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_56: # %else218
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_57
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_583
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_57: # %else222
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_58
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_584
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_58: # %else226
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_59
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_585
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_59: # %else230
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_60
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_586
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_60: # %else234
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_61
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_587
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_61: # %else238
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_63
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_62: # %cond.load241
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 62
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 61
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_63: # %else242
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_65
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.64: # %cond.load245
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v17, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 63
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 62
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v17, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_65: # %else246
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_66
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_588
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_66: # %else250
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_67
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_589
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_67: # %else254
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_68
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_590
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_68: # %else258
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_69
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_591
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_69: # %else262
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_70
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_592
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_70: # %else266
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_71
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_593
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_71: # %else270
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_72
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_594
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_72: # %else274
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_73
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_595
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_73: # %else278
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_74
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_596
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_74: # %else282
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_75
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_597
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_75: # %else286
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_76
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_598
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_76: # %else290
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_77
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_599
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_77: # %else294
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_78
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_600
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_78: # %else298
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_79
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_601
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_79: # %else302
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_80
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_602
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_80: # %else306
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_81
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_603
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_81: # %else310
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_82
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_604
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_82: # %else314
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_83
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_605
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_83: # %else318
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_84
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_606
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_84: # %else322
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_85
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_607
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_85: # %else326
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_86
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_608
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_86: # %else330
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_87
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_609
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_87: # %else334
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_88
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_610
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_88: # %else338
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_89
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_611
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_89: # %else342
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_90
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_612
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_90: # %else346
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_91
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_613
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_91: # %else350
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_92
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_614
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_92: # %else354
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_93
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_615
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_93: # %else358
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_94
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_616
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_94: # %else362
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_95
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_617
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_95: # %else366
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_96
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_618
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_96: # %else370
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_97
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_619
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_97: # %else374
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_98
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_620
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_98: # %else378
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_99
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_621
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_99: # %else382
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_100
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_622
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_100: # %else386
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_101
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_623
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_101: # %else390
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_102
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_624
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_102: # %else394
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_103
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_625
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_103: # %else398
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_104
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_626
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_104: # %else402
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_105
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_627
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_105: # %else406
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_106
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_628
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_106: # %else410
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_107
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_629
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_107: # %else414
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_108
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_630
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_108: # %else418
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_109
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_631
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_109: # %else422
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_110
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_632
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_110: # %else426
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_111
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_633
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_111: # %else430
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_112
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_634
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_112: # %else434
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_113
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_635
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_113: # %else438
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_114
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_636
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_114: # %else442
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_115
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_637
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_115: # %else446
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_116
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_638
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_116: # %else450
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_117
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_639
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_117: # %else454
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_118
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_640
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_118: # %else458
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_119
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_641
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_119: # %else462
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_120
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_642
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_120: # %else466
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_121
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_643
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_121: # %else470
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_122
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_644
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_122: # %else474
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_123
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_645
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_123: # %else478
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_124
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_646
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_124: # %else482
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_125
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_647
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_125: # %else486
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_126
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_648
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_126: # %else490
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_127
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_649
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_127: # %else494
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_129
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_128: # %cond.load497
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 126
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 125
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_129: # %else498
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_131
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.130: # %cond.load501
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v18, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 127
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 126
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v18, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_131: # %else502
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_132
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_650
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_132: # %else506
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_133
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_651
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_133: # %else510
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_134
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_652
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_134: # %else514
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_135
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_653
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_135: # %else518
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_136
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_654
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_136: # %else522
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_137
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_655
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_137: # %else526
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_138
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_656
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_138: # %else530
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_139
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_657
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_139: # %else534
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_140
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_658
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_140: # %else538
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_141
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_659
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_141: # %else542
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_142
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_660
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_142: # %else546
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_143
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_661
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_143: # %else550
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_144
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_662
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_144: # %else554
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_145
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_663
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_145: # %else558
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_146
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_664
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_146: # %else562
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_147
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_665
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_147: # %else566
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_148
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_666
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_148: # %else570
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_149
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_667
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_149: # %else574
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_150
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_668
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_150: # %else578
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_151
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_669
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_151: # %else582
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_152
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_670
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_152: # %else586
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_153
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_671
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_153: # %else590
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_154
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_672
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_154: # %else594
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_155
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_673
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_155: # %else598
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_156
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_674
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_156: # %else602
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_157
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_675
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_157: # %else606
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_158
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_676
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_158: # %else610
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_159
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_677
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_159: # %else614
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_160
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_678
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_160: # %else618
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_161
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_679
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_161: # %else622
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_162
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_680
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_162: # %else626
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_163
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_681
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_163: # %else630
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_164
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_682
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_164: # %else634
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_165
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_683
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_165: # %else638
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_166
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_684
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_166: # %else642
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_167
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_685
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_167: # %else646
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_168
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_686
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_168: # %else650
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_169
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_687
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_169: # %else654
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_170
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_688
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_170: # %else658
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_171
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_689
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_171: # %else662
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_172
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_690
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_172: # %else666
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_173
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_691
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_173: # %else670
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_174
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_692
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_174: # %else674
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_175
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_693
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_175: # %else678
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_176
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_694
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_176: # %else682
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_177
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_695
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_177: # %else686
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_178
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_696
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_178: # %else690
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_179
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_697
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_179: # %else694
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_180
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_698
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_180: # %else698
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_181
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_699
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_181: # %else702
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_182
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_700
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_182: # %else706
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_183
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_701
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_183: # %else710
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_184
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_702
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_184: # %else714
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_185
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_703
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_185: # %else718
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_186
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_704
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_186: # %else722
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_187
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_705
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_187: # %else726
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_188
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_706
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_188: # %else730
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_189
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_707
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_189: # %else734
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_190
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_708
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_190: # %else738
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_191
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_709
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_191: # %else742
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_192
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_710
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_192: # %else746
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_193
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_711
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_193: # %else750
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_195
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_194: # %cond.load753
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 190
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 189
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_195: # %else754
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_197
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.196: # %cond.load757
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v20, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 191
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 190
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_197: # %else758
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_198
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_712
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_198: # %else762
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_199
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_713
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_199: # %else766
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_200
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_714
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_200: # %else770
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_201
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_715
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_201: # %else774
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_202
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_716
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_202: # %else778
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_203
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_717
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_203: # %else782
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_204
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_718
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_204: # %else786
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_205
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_719
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_205: # %else790
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_206
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_720
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_206: # %else794
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_207
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_721
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_207: # %else798
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_208
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_722
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_208: # %else802
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_209
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_723
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_209: # %else806
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_210
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_724
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_210: # %else810
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_211
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_725
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_211: # %else814
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_212
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_726
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_212: # %else818
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_213
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_727
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_213: # %else822
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_214
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_728
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_214: # %else826
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_215
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_729
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_215: # %else830
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_216
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_730
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_216: # %else834
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_217
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_731
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_217: # %else838
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_218
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_732
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_218: # %else842
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_219
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_733
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_219: # %else846
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_220
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_734
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_220: # %else850
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_221
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_735
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_221: # %else854
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_222
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_736
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_222: # %else858
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_223
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_737
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_223: # %else862
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_224
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_738
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_224: # %else866
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_225
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_739
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_225: # %else870
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_226
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_740
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_226: # %else874
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_227
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_741
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_227: # %else878
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_228
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_742
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_228: # %else882
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_229
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_743
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_229: # %else886
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_230
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_744
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_230: # %else890
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_231
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_745
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_231: # %else894
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_232
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_746
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_232: # %else898
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_233
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_747
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_233: # %else902
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_234
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_748
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_234: # %else906
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_235
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_749
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_235: # %else910
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_236
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_750
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_236: # %else914
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_237
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_751
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_237: # %else918
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_238
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_752
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_238: # %else922
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_239
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_753
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_239: # %else926
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_240
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_754
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_240: # %else930
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_241
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_755
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_241: # %else934
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_242
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_756
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_242: # %else938
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_243
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_757
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_243: # %else942
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_244
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_758
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_244: # %else946
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_245
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_759
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_245: # %else950
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_246
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_760
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_246: # %else954
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_247
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_761
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_247: # %else958
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_248
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_762
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_248: # %else962
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_249
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_763
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_249: # %else966
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_250
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_764
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_250: # %else970
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_251
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_765
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_251: # %else974
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_252
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_766
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_252: # %else978
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_253
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_767
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_253: # %else982
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_254
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_768
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_254: # %else986
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_255
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_769
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_255: # %else990
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_256
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_770
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_256: # %else994
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_257
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_771
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_257: # %else998
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_258
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_772
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_258: # %else1002
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_259
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_773
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_259: # %else1006
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_261
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_260: # %cond.load1009
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 254
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 253
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_261: # %else1010
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_263
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.262: # %cond.load1013
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v20, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 255
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 254
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_263: # %else1014
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_264
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_774
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_264: # %else1018
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_265
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_775
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_265: # %else1022
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_266
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_776
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_266: # %else1026
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_267
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_777
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_267: # %else1030
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_268
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_778
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_268: # %else1034
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_269
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_779
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_269: # %else1038
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_270
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_780
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_270: # %else1042
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_271
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_781
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_271: # %else1046
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_272
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_782
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_272: # %else1050
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_273
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_783
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_273: # %else1054
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_274
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_784
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_274: # %else1058
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_275
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_785
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_275: # %else1062
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_276
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_786
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_276: # %else1066
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_277
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_787
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_277: # %else1070
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_278
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_788
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_278: # %else1074
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_279
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_789
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_279: # %else1078
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_280
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_790
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_280: # %else1082
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_281
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_791
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_281: # %else1086
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_282
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_792
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_282: # %else1090
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_283
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_793
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_283: # %else1094
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_284
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_794
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_284: # %else1098
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_285
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_795
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_285: # %else1102
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_286
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_796
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_286: # %else1106
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_287
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_797
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_287: # %else1110
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_288
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_798
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_288: # %else1114
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_289
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_799
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_289: # %else1118
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_290
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_800
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_290: # %else1122
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_291
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_801
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_291: # %else1126
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_292
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_802
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_292: # %else1130
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_293
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_803
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_293: # %else1134
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_294
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_804
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_294: # %else1138
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_295
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_805
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_295: # %else1142
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_296
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_806
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_296: # %else1146
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_297
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_807
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_297: # %else1150
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_298
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_808
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_298: # %else1154
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_299
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_809
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_299: # %else1158
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_300
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_810
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_300: # %else1162
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_301
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_811
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_301: # %else1166
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_302
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_812
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_302: # %else1170
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_303
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_813
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_303: # %else1174
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_304
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_814
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_304: # %else1178
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_305
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_815
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_305: # %else1182
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_306
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_816
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_306: # %else1186
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_307
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_817
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_307: # %else1190
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_308
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_818
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_308: # %else1194
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_309
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_819
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_309: # %else1198
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_310
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_820
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_310: # %else1202
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_311
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_821
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_311: # %else1206
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_312
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_822
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_312: # %else1210
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_313
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_823
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_313: # %else1214
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_314
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_824
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_314: # %else1218
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_315
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_825
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_315: # %else1222
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_316
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_826
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_316: # %else1226
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_317
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_827
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_317: # %else1230
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_318
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_828
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_318: # %else1234
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_319
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_829
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_319: # %else1238
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_320
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_830
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_320: # %else1242
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_321
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_831
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_321: # %else1246
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_322
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_832
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_322: # %else1250
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_323
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_833
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_323: # %else1254
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_324
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_834
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_324: # %else1258
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_325
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_835
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_325: # %else1262
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_327
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_326: # %cond.load1265
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 318
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 317
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_327: # %else1266
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_329
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.328: # %cond.load1269
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 319
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 318
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_329: # %else1270
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_330
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_836
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_330: # %else1274
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_331
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_837
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_331: # %else1278
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_332
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_838
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_332: # %else1282
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_333
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_839
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_333: # %else1286
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_334
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_840
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_334: # %else1290
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_335
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_841
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_335: # %else1294
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_336
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_842
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_336: # %else1298
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_337
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_843
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_337: # %else1302
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_338
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_844
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_338: # %else1306
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_339
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_845
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_339: # %else1310
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_340
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_846
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_340: # %else1314
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_341
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_847
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_341: # %else1318
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_342
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_848
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_342: # %else1322
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_343
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_849
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_343: # %else1326
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_344
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_850
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_344: # %else1330
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_345
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_851
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_345: # %else1334
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_346
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_852
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_346: # %else1338
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_347
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_853
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_347: # %else1342
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_348
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_854
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_348: # %else1346
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_349
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_855
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_349: # %else1350
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_350
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_856
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_350: # %else1354
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_351
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_857
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_351: # %else1358
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_352
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_858
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_352: # %else1362
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_353
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_859
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_353: # %else1366
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_354
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_860
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_354: # %else1370
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_355
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_861
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_355: # %else1374
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_356
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_862
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_356: # %else1378
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_357
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_863
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_357: # %else1382
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_358
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_864
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_358: # %else1386
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_359
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_865
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_359: # %else1390
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_360
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_866
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_360: # %else1394
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_361
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_867
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_361: # %else1398
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_362
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_868
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_362: # %else1402
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_363
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_869
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_363: # %else1406
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_364
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_870
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_364: # %else1410
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_365
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_871
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_365: # %else1414
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_366
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_872
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_366: # %else1418
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_367
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_873
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_367: # %else1422
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_368
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_874
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_368: # %else1426
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_369
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_875
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_369: # %else1430
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_370
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_876
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_370: # %else1434
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_371
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_877
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_371: # %else1438
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_372
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_878
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_372: # %else1442
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_373
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_879
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_373: # %else1446
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_374
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_880
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_374: # %else1450
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_375
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_881
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_375: # %else1454
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_376
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_882
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_376: # %else1458
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_377
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_883
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_377: # %else1462
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_378
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_884
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_378: # %else1466
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_379
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_885
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_379: # %else1470
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_380
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_886
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_380: # %else1474
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_381
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_887
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_381: # %else1478
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_382
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_888
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_382: # %else1482
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_383
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_889
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_383: # %else1486
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_384
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_890
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_384: # %else1490
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_385
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_891
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_385: # %else1494
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_386
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_892
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_386: # %else1498
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_387
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_893
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_387: # %else1502
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_388
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_894
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_388: # %else1506
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_389
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_895
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_389: # %else1510
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_390
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_896
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_390: # %else1514
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_391
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_897
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_391: # %else1518
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_393
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_392: # %cond.load1521
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 382
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 381
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_393: # %else1522
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_395
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.394: # %cond.load1525
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 383
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 382
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_395: # %else1526
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_396
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_898
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_396: # %else1530
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_397
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_899
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_397: # %else1534
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_398
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_900
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_398: # %else1538
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_399
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_901
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_399: # %else1542
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_400
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_902
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_400: # %else1546
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_401
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_903
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_401: # %else1550
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_402
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_904
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_402: # %else1554
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_403
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_905
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_403: # %else1558
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_404
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_906
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_404: # %else1562
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_405
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_907
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_405: # %else1566
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_406
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_908
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_406: # %else1570
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a1, .LBB61_407
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_909
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_407: # %else1574
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_408
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_910
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_408: # %else1578
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_409
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_911
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_409: # %else1582
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_410
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_912
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_410: # %else1586
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_411
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_913
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_411: # %else1590
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_412
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_914
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_412: # %else1594
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_413
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_915
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_413: # %else1598
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_414
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_916
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_414: # %else1602
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_415
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_917
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_415: # %else1606
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_416
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_918
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_416: # %else1610
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_417
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_919
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_417: # %else1614
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_418
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_920
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_418: # %else1618
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_419
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_921
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_419: # %else1622
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_420
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_922
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_420: # %else1626
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_421
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_923
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_421: # %else1630
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_422
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_924
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_422: # %else1634
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_423
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_925
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_423: # %else1638
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_424
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_926
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_424: # %else1642
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_425
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_927
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_425: # %else1646
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_426
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_928
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_426: # %else1650
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_427
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_929
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_427: # %else1654
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_428
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_930
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_428: # %else1658
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_429
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_931
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_429: # %else1662
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_430
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_932
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_430: # %else1666
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_431
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_933
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_431: # %else1670
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_432
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_934
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_432: # %else1674
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_433
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_935
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_433: # %else1678
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_434
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_936
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_434: # %else1682
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_435
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_937
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_435: # %else1686
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_436
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_938
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_436: # %else1690
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_437
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_939
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_437: # %else1694
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_438
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_940
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_438: # %else1698
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_439
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_941
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_439: # %else1702
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_440
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_942
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_440: # %else1706
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_441
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_943
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_441: # %else1710
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_442
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_944
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_442: # %else1714
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_443
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_945
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_443: # %else1718
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_444
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_946
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_444: # %else1722
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_445
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_947
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_445: # %else1726
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_446
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_948
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_446: # %else1730
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_447
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_949
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_447: # %else1734
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_448
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_950
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_448: # %else1738
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_449
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_951
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_449: # %else1742
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_450
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_952
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_450: # %else1746
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_451
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_953
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_451: # %else1750
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_452
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_954
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_452: # %else1754
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_453
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_955
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_453: # %else1758
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_454
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_956
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_454: # %else1762
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_455
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_957
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_455: # %else1766
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_456
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_958
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_456: # %else1770
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_457
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_959
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_457: # %else1774
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_459
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_458: # %cond.load1777
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 446
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 445
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_459: # %else1778
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_461
-; CHECK-VRGATHER-RV64-NEXT:  # %bb.460: # %cond.load1781
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 447
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 446
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_461: # %else1782
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_462
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_960
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_462: # %else1786
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_463
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_961
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_463: # %else1790
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_464
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_962
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_464: # %else1794
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_465
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_963
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_465: # %else1798
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_466
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_964
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_466: # %else1802
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_467
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_965
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_467: # %else1806
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_468
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_966
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_468: # %else1810
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_469
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_967
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_469: # %else1814
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_470
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_968
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_470: # %else1818
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_471
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_969
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_471: # %else1822
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_472
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_970
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_472: # %else1826
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    beqz a2, .LBB61_473
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_971
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_473: # %else1830
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_474
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_972
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_474: # %else1834
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_475
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_973
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_475: # %else1838
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_476
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_974
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_476: # %else1842
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_477
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_975
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_477: # %else1846
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_478
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_976
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_478: # %else1850
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_479
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_977
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_479: # %else1854
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_480
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_978
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_480: # %else1858
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_481
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_979
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_481: # %else1862
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_482
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_980
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_482: # %else1866
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_483
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_981
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_483: # %else1870
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_484
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_982
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_484: # %else1874
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_485
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_983
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_485: # %else1878
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_486
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_984
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_486: # %else1882
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_487
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_985
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_487: # %else1886
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_488
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_986
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_488: # %else1890
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_489
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_987
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_489: # %else1894
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_490
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_988
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_490: # %else1898
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_491
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_989
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_491: # %else1902
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_492
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_990
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_492: # %else1906
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_493
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_991
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_493: # %else1910
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_494
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_992
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_494: # %else1914
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_495
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_993
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_495: # %else1918
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_496
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_994
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_496: # %else1922
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_497
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_995
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_497: # %else1926
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_498
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_996
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_498: # %else1930
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_499
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_997
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_499: # %else1934
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_500
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_998
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_500: # %else1938
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_501
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_999
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_501: # %else1942
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_502
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1000
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_502: # %else1946
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_503
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1001
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_503: # %else1950
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_504
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1002
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_504: # %else1954
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_505
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1003
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_505: # %else1958
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_506
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1004
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_506: # %else1962
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_507
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1005
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_507: # %else1966
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_508
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1006
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_508: # %else1970
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_509
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1007
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_509: # %else1974
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_510
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1008
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_510: # %else1978
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_511
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1009
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_511: # %else1982
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_512
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1010
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_512: # %else1986
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_513
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1011
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_513: # %else1990
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_514
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1012
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_514: # %else1994
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_515
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1013
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_515: # %else1998
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_516
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1014
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_516: # %else2002
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_517
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1015
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_517: # %else2006
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_518
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1016
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_518: # %else2010
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_519
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1017
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_519: # %else2014
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_520
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1018
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_520: # %else2018
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_521
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1019
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_521: # %else2022
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_522
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1020
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_522: # %else2026
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_523
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1021
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_523: # %else2030
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_524
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1022
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_524: # %else2034
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_525
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1023
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_525: # %else2038
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_526
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_1024
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_526: # %else2042
-; CHECK-VRGATHER-RV64-NEXT:    ret
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_527: # %cond.load
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v8, a1
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_528
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_2
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_528: # %cond.load1
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_529
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_3
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_529: # %cond.load5
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_530
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_4
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_530: # %cond.load9
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_531
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_5
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_531: # %cond.load13
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_532
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_6
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_532: # %cond.load17
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_533
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_7
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_533: # %cond.load21
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_534
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_8
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_534: # %cond.load25
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_535
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_9
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_535: # %cond.load29
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_536
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_10
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_536: # %cond.load33
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_537
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_11
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_537: # %cond.load37
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_538
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_12
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_538: # %cond.load41
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_539
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_13
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_539: # %cond.load45
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_540
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_14
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_540: # %cond.load49
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_541
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_15
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_541: # %cond.load53
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_542
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_16
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_542: # %cond.load57
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_543
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_17
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_543: # %cond.load61
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_544
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_18
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_544: # %cond.load65
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_545
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_19
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_545: # %cond.load69
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_546
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_20
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_546: # %cond.load73
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_547
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_21
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_547: # %cond.load77
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_548
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_22
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_548: # %cond.load81
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_549
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_23
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_549: # %cond.load85
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_550
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_24
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_550: # %cond.load89
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_551
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_25
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_551: # %cond.load93
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_552
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_26
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_552: # %cond.load97
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_553
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_27
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_553: # %cond.load101
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_554
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_28
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_554: # %cond.load105
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_555
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_29
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_555: # %cond.load109
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_556
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_30
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_556: # %cond.load113
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_557
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_31
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_557: # %cond.load117
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v16, 30
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_558
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_32
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_558: # %cond.load121
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_559
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_33
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_559: # %cond.load125
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 32
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_560
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_34
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_560: # %cond.load129
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 33
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_561
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_35
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_561: # %cond.load133
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 34
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_562
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_36
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_562: # %cond.load137
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 35
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_563
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_37
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_563: # %cond.load141
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 36
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_564
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_38
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_564: # %cond.load145
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 37
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_565
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_39
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_565: # %cond.load149
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 38
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_566
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_40
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_566: # %cond.load153
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 39
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_567
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_41
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_567: # %cond.load157
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 40
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_568
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_42
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_568: # %cond.load161
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 41
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_569
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_43
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_569: # %cond.load165
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 42
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_570
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_44
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_570: # %cond.load169
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 43
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_571
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_45
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_571: # %cond.load173
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 44
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_572
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_46
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_572: # %cond.load177
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 45
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_573
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_47
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_573: # %cond.load181
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 46
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_574
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_48
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_574: # %cond.load185
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 47
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_575
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_49
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_575: # %cond.load189
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 48
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_576
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_50
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_576: # %cond.load193
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 49
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_577
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_51
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_577: # %cond.load197
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 50
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_578
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_52
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_578: # %cond.load201
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 51
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_579
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_53
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_579: # %cond.load205
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 53
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 52
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_580
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_54
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_580: # %cond.load209
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 54
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 53
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_581
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_55
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_581: # %cond.load213
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 55
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 54
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_582
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_56
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_582: # %cond.load217
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 56
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 55
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_583
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_57
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_583: # %cond.load221
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 57
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 56
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_584
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_58
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_584: # %cond.load225
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 58
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 57
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_585
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_59
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_585: # %cond.load229
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 59
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 58
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_586
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_60
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_586: # %cond.load233
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 60
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 59
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_587
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_61
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_587: # %cond.load237
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 61
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 60
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1025
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_62
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1025: # %cond.load237
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_63
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_588: # %cond.load249
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 63
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_589
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_67
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_589: # %cond.load253
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 65
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 64
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_590
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_68
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_590: # %cond.load257
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 66
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 65
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_591
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_69
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_591: # %cond.load261
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 67
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 66
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_592
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_70
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_592: # %cond.load265
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 68
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 67
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_593
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_71
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_593: # %cond.load269
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 69
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 68
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_594
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_72
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_594: # %cond.load273
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 70
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 69
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_595
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_73
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_595: # %cond.load277
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 71
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 70
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_596
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_74
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_596: # %cond.load281
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 72
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 71
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_597
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_75
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_597: # %cond.load285
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 73
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 72
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_598
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_76
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_598: # %cond.load289
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 74
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 73
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_599
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_77
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_599: # %cond.load293
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 75
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 74
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_600
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_78
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_600: # %cond.load297
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 76
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 75
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_601
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_79
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_601: # %cond.load301
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 77
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 76
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_602
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_80
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_602: # %cond.load305
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 78
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 77
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_603
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_81
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_603: # %cond.load309
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 79
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 78
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_604
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_82
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_604: # %cond.load313
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 80
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 79
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_605
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_83
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_605: # %cond.load317
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 81
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 80
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_606
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_84
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_606: # %cond.load321
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 82
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 81
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_607
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_85
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_607: # %cond.load325
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 83
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 82
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_608
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_86
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_608: # %cond.load329
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 84
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 83
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_609
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_87
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_609: # %cond.load333
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 85
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 84
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_610
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_88
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_610: # %cond.load337
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 86
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 85
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_611
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_89
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_611: # %cond.load341
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 87
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 86
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_612
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_90
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_612: # %cond.load345
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 88
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 87
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_613
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_91
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_613: # %cond.load349
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 89
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 88
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_614
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_92
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_614: # %cond.load353
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 90
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 89
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_615
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_93
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_615: # %cond.load357
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 91
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 90
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_616
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_94
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_616: # %cond.load361
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 92
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 91
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_617
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_95
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_617: # %cond.load365
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 93
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 92
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_618
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_96
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_618: # %cond.load369
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 94
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 93
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_619
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_97
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_619: # %cond.load373
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 95
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 94
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_620
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_98
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_620: # %cond.load377
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 96
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 95
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_621
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_99
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_621: # %cond.load381
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 97
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 96
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_622
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_100
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_622: # %cond.load385
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 98
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 97
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_623
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_101
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_623: # %cond.load389
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 99
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 98
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_624
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_102
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_624: # %cond.load393
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 100
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 99
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_625
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_103
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_625: # %cond.load397
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 101
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 100
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_626
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_104
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_626: # %cond.load401
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 102
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 101
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_627
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_105
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_627: # %cond.load405
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 103
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 102
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_628
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_106
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_628: # %cond.load409
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 104
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 103
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_629
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_107
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_629: # %cond.load413
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 105
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 104
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_630
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_108
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_630: # %cond.load417
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 106
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 105
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_631
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_109
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_631: # %cond.load421
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 107
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 106
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_632
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_110
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_632: # %cond.load425
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 108
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 107
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_633
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_111
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_633: # %cond.load429
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 109
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 108
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_634
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_112
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_634: # %cond.load433
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 110
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 109
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_635
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_113
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_635: # %cond.load437
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 111
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 110
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_636
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_114
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_636: # %cond.load441
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 112
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 111
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_637
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_115
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_637: # %cond.load445
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 113
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 112
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_638
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_116
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_638: # %cond.load449
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 114
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 113
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_639
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_117
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_639: # %cond.load453
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 115
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 114
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_640
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_118
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_640: # %cond.load457
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 116
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 115
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_641
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_119
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_641: # %cond.load461
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 117
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 116
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_642
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_120
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_642: # %cond.load465
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 118
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 117
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_643
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_121
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_643: # %cond.load469
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 119
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 118
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_644
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_122
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_644: # %cond.load473
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 120
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 119
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_645
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_123
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_645: # %cond.load477
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 121
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 120
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_646
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_124
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_646: # %cond.load481
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 122
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 121
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_647
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_125
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_647: # %cond.load485
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 123
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 122
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_648
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_126
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_648: # %cond.load489
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 124
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 123
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_649
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_127
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_649: # %cond.load493
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 125
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 124
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1026
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_128
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1026: # %cond.load493
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_129
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_650: # %cond.load505
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 127
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_651
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_133
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_651: # %cond.load509
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 129
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 128
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_652
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_134
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_652: # %cond.load513
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 130
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 129
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_653
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_135
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_653: # %cond.load517
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 131
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 130
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_654
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_136
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_654: # %cond.load521
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 132
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 131
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_655
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_137
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_655: # %cond.load525
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 133
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 132
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_656
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_138
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_656: # %cond.load529
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 134
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 133
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_657
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_139
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_657: # %cond.load533
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 135
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 134
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_658
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_140
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_658: # %cond.load537
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 136
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 135
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_659
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_141
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_659: # %cond.load541
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 137
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 136
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_660
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_142
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_660: # %cond.load545
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 138
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 137
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_661
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_143
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_661: # %cond.load549
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 139
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 138
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_662
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_144
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_662: # %cond.load553
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 140
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 139
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_663
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_145
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_663: # %cond.load557
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 141
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 140
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_664
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_146
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_664: # %cond.load561
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 142
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 141
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_665
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_147
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_665: # %cond.load565
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 143
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 142
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_666
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_148
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_666: # %cond.load569
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 144
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 143
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_667
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_149
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_667: # %cond.load573
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 145
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 144
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_668
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_150
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_668: # %cond.load577
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 146
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 145
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_669
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_151
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_669: # %cond.load581
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 147
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 146
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_670
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_152
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_670: # %cond.load585
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 148
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 147
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_671
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_153
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_671: # %cond.load589
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 149
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 148
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_672
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_154
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_672: # %cond.load593
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 150
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 149
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_673
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_155
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_673: # %cond.load597
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 151
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 150
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_674
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_156
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_674: # %cond.load601
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 152
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 151
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_675
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_157
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_675: # %cond.load605
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 153
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 152
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_676
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_158
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_676: # %cond.load609
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 154
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 153
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_677
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_159
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_677: # %cond.load613
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 155
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 154
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_678
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_160
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_678: # %cond.load617
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 156
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 155
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_679
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_161
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_679: # %cond.load621
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 157
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 156
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_680
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_162
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_680: # %cond.load625
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 158
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 157
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_681
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_163
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_681: # %cond.load629
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 159
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 158
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_682
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_164
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_682: # %cond.load633
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 160
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 159
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_683
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_165
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_683: # %cond.load637
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 161
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 160
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_684
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_166
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_684: # %cond.load641
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 162
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 161
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_685
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_167
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_685: # %cond.load645
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 163
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 162
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_686
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_168
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_686: # %cond.load649
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 164
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 163
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_687
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_169
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_687: # %cond.load653
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 165
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 164
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_688
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_170
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_688: # %cond.load657
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 166
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 165
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_689
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_171
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_689: # %cond.load661
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 167
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 166
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_690
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_172
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_690: # %cond.load665
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 168
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 167
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_691
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_173
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_691: # %cond.load669
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 169
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 168
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_692
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_174
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_692: # %cond.load673
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 170
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 169
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_693
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_175
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_693: # %cond.load677
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 171
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 170
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_694
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_176
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_694: # %cond.load681
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 172
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 171
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_695
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_177
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_695: # %cond.load685
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 173
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 172
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_696
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_178
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_696: # %cond.load689
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 174
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 173
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_697
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_179
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_697: # %cond.load693
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 175
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 174
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_698
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_180
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_698: # %cond.load697
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 176
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 175
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_699
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_181
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_699: # %cond.load701
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 177
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 176
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_700
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_182
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_700: # %cond.load705
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 178
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 177
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_701
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_183
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_701: # %cond.load709
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 179
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 178
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_702
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_184
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_702: # %cond.load713
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 180
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 179
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_703
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_185
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_703: # %cond.load717
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 181
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 180
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_704
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_186
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_704: # %cond.load721
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 182
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 181
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_705
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_187
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_705: # %cond.load725
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 183
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 182
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_706
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_188
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_706: # %cond.load729
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 184
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 183
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_707
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_189
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_707: # %cond.load733
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 185
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 184
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_708
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_190
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_708: # %cond.load737
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 186
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 185
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_709
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_191
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_709: # %cond.load741
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 187
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 186
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_710
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_192
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_710: # %cond.load745
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 188
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 187
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_711
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_193
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_711: # %cond.load749
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 189
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 188
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1027
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_194
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1027: # %cond.load749
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_195
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_712: # %cond.load761
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 192
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 191
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_713
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_199
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_713: # %cond.load765
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 193
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 192
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_714
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_200
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_714: # %cond.load769
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 194
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 193
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_715
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_201
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_715: # %cond.load773
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 195
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 194
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_716
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_202
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_716: # %cond.load777
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 196
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 195
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_717
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_203
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_717: # %cond.load781
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 197
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 196
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_718
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_204
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_718: # %cond.load785
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 198
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 197
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_719
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_205
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_719: # %cond.load789
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 199
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 198
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_720
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_206
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_720: # %cond.load793
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 200
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 199
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_721
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_207
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_721: # %cond.load797
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 201
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 200
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_722
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_208
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_722: # %cond.load801
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 202
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 201
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_723
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_209
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_723: # %cond.load805
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 203
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 202
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_724
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_210
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_724: # %cond.load809
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 204
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 203
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_725
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_211
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_725: # %cond.load813
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 205
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 204
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_726
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_212
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_726: # %cond.load817
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 206
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 205
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_727
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_213
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_727: # %cond.load821
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 207
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 206
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_728
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_214
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_728: # %cond.load825
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 208
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 207
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_729
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_215
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_729: # %cond.load829
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 209
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 208
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_730
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_216
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_730: # %cond.load833
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 210
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 209
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_731
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_217
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_731: # %cond.load837
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 211
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 210
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_732
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_218
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_732: # %cond.load841
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 212
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 211
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_733
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_219
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_733: # %cond.load845
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 213
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 212
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_734
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_220
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_734: # %cond.load849
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 214
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 213
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_735
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_221
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_735: # %cond.load853
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 215
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 214
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_736
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_222
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_736: # %cond.load857
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 216
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 215
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_737
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_223
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_737: # %cond.load861
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 217
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 216
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_738
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_224
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_738: # %cond.load865
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 218
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 217
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_739
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_225
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_739: # %cond.load869
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 219
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 218
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_740
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_226
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_740: # %cond.load873
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 220
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 219
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_741
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_227
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_741: # %cond.load877
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 221
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 220
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_742
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_228
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_742: # %cond.load881
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 222
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 221
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_743
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_229
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_743: # %cond.load885
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 223
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 222
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_744
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_230
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_744: # %cond.load889
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 224
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 223
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_745
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_231
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_745: # %cond.load893
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 225
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 224
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_746
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_232
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_746: # %cond.load897
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 226
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 225
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_747
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_233
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_747: # %cond.load901
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 227
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 226
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_748
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_234
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_748: # %cond.load905
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 228
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 227
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_749
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_235
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_749: # %cond.load909
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 229
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 228
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_750
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_236
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_750: # %cond.load913
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 230
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 229
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_751
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_237
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_751: # %cond.load917
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 231
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 230
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_752
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_238
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_752: # %cond.load921
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 232
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 231
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_753
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_239
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_753: # %cond.load925
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 233
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 232
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_754
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_240
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_754: # %cond.load929
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 234
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 233
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_755
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_241
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_755: # %cond.load933
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 235
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 234
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_756
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_242
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_756: # %cond.load937
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 236
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 235
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_757
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_243
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_757: # %cond.load941
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 237
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 236
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_758
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_244
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_758: # %cond.load945
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 238
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 237
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_759
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_245
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_759: # %cond.load949
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 239
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 238
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_760
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_246
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_760: # %cond.load953
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 240
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 239
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_761
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_247
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_761: # %cond.load957
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 241
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 240
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_762
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_248
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_762: # %cond.load961
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 242
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 241
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_763
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_249
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_763: # %cond.load965
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 243
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 242
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_764
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_250
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_764: # %cond.load969
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 244
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 243
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_765
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_251
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_765: # %cond.load973
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 245
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 244
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_766
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_252
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_766: # %cond.load977
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 246
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 245
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_767
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_253
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_767: # %cond.load981
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 247
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 246
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_768
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_254
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_768: # %cond.load985
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 248
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 247
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_769
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_255
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_769: # %cond.load989
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 249
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 248
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_770
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_256
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_770: # %cond.load993
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 250
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 249
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_771
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_257
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_771: # %cond.load997
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 251
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 250
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_772
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_258
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_772: # %cond.load1001
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 252
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 251
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_773
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_259
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_773: # %cond.load1005
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 253
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 252
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1028
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_260
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1028: # %cond.load1005
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_261
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_774: # %cond.load1017
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 255
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-VRGATHER-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_775
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_265
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_775: # %cond.load1021
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 257
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 256
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_776
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_266
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_776: # %cond.load1025
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 258
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 257
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_777
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_267
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_777: # %cond.load1029
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 259
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 258
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_778
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_268
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_778: # %cond.load1033
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 260
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 259
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_779
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_269
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_779: # %cond.load1037
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 261
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 260
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_780
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_270
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_780: # %cond.load1041
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 262
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 261
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_781
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_271
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_781: # %cond.load1045
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 263
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 262
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_782
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_272
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_782: # %cond.load1049
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 264
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 263
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_783
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_273
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_783: # %cond.load1053
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 265
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 264
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_784
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_274
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_784: # %cond.load1057
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 266
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 265
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_785
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_275
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_785: # %cond.load1061
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 267
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 266
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_786
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_276
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_786: # %cond.load1065
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 268
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 267
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_787
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_277
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_787: # %cond.load1069
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 269
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 268
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_788
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_278
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_788: # %cond.load1073
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 270
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 269
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_789
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_279
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_789: # %cond.load1077
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 271
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 270
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_790
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_280
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_790: # %cond.load1081
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 272
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 271
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_791
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_281
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_791: # %cond.load1085
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 273
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 272
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_792
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_282
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_792: # %cond.load1089
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 274
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 273
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_793
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_283
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_793: # %cond.load1093
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 275
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 274
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_794
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_284
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_794: # %cond.load1097
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 276
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 275
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_795
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_285
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_795: # %cond.load1101
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 277
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 276
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_796
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_286
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_796: # %cond.load1105
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 278
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 277
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_797
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_287
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_797: # %cond.load1109
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 279
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 278
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_798
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_288
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_798: # %cond.load1113
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 280
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 279
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_799
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_289
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_799: # %cond.load1117
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 281
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 280
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_800
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_290
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_800: # %cond.load1121
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 282
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 281
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_801
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_291
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_801: # %cond.load1125
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 283
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 282
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_802
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_292
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_802: # %cond.load1129
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 284
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 283
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_803
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_293
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_803: # %cond.load1133
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 285
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 284
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_804
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_294
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_804: # %cond.load1137
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 286
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 285
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_805
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_295
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_805: # %cond.load1141
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 287
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 286
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_806
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_296
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_806: # %cond.load1145
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 288
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 287
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_807
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_297
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_807: # %cond.load1149
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 289
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 288
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_808
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_298
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_808: # %cond.load1153
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 290
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 289
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_809
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_299
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_809: # %cond.load1157
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 291
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 290
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_810
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_300
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_810: # %cond.load1161
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 292
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 291
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_811
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_301
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_811: # %cond.load1165
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 293
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 292
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_812
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_302
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_812: # %cond.load1169
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 294
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 293
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_813
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_303
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_813: # %cond.load1173
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 295
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 294
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_814
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_304
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_814: # %cond.load1177
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 296
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 295
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_815
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_305
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_815: # %cond.load1181
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 297
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 296
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_816
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_306
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_816: # %cond.load1185
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 298
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 297
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_817
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_307
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_817: # %cond.load1189
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 299
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 298
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_818
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_308
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_818: # %cond.load1193
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 300
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 299
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_819
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_309
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_819: # %cond.load1197
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 301
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 300
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_820
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_310
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_820: # %cond.load1201
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 302
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 301
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_821
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_311
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_821: # %cond.load1205
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 303
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 302
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_822
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_312
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_822: # %cond.load1209
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 304
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 303
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_823
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_313
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_823: # %cond.load1213
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 305
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 304
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_824
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_314
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_824: # %cond.load1217
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 306
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 305
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_825
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_315
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_825: # %cond.load1221
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 307
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 306
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_826
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_316
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_826: # %cond.load1225
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 308
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 307
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_827
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_317
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_827: # %cond.load1229
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 309
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 308
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_828
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_318
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_828: # %cond.load1233
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 310
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 309
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_829
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_319
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_829: # %cond.load1237
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 311
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 310
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_830
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_320
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_830: # %cond.load1241
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 312
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 311
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_831
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_321
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_831: # %cond.load1245
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 313
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 312
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_832
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_322
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_832: # %cond.load1249
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 314
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 313
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_833
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_323
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_833: # %cond.load1253
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 315
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 314
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_834
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_324
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_834: # %cond.load1257
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 316
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 315
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_835
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_325
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_835: # %cond.load1261
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 317
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 316
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1029
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_326
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1029: # %cond.load1261
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_327
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_836: # %cond.load1273
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 320
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 319
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_837
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_331
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_837: # %cond.load1277
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 321
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 320
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_838
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_332
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_838: # %cond.load1281
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 322
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 321
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_839
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_333
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_839: # %cond.load1285
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 323
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 322
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_840
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_334
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_840: # %cond.load1289
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 324
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 323
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_841
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_335
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_841: # %cond.load1293
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 325
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 324
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_842
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_336
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_842: # %cond.load1297
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 326
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 325
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_843
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_337
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_843: # %cond.load1301
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 327
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 326
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_844
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_338
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_844: # %cond.load1305
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 328
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 327
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_845
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_339
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_845: # %cond.load1309
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 329
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 328
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_846
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_340
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_846: # %cond.load1313
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 330
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 329
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_847
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_341
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_847: # %cond.load1317
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 331
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 330
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_848
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_342
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_848: # %cond.load1321
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 332
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 331
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_849
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_343
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_849: # %cond.load1325
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 333
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 332
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_850
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_344
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_850: # %cond.load1329
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 334
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 333
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_851
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_345
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_851: # %cond.load1333
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 335
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 334
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_852
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_346
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_852: # %cond.load1337
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 336
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 335
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_853
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_347
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_853: # %cond.load1341
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 337
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 336
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_854
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_348
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_854: # %cond.load1345
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 338
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 337
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_855
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_349
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_855: # %cond.load1349
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 339
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 338
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_856
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_350
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_856: # %cond.load1353
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 340
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 339
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_857
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_351
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_857: # %cond.load1357
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 341
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 340
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_858
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_352
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_858: # %cond.load1361
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 342
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 341
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_859
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_353
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_859: # %cond.load1365
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 343
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 342
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_860
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_354
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_860: # %cond.load1369
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 344
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 343
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_861
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_355
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_861: # %cond.load1373
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 345
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 344
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_862
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_356
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_862: # %cond.load1377
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 346
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 345
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_863
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_357
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_863: # %cond.load1381
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 347
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 346
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_864
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_358
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_864: # %cond.load1385
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 348
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 347
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_865
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_359
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_865: # %cond.load1389
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 349
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 348
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_866
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_360
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_866: # %cond.load1393
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 350
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 349
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_867
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_361
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_867: # %cond.load1397
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 351
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 350
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_868
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_362
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_868: # %cond.load1401
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 352
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 351
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_869
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_363
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_869: # %cond.load1405
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 353
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 352
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_870
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_364
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_870: # %cond.load1409
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 354
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 353
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_871
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_365
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_871: # %cond.load1413
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 355
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 354
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_872
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_366
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_872: # %cond.load1417
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 356
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 355
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_873
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_367
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_873: # %cond.load1421
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 357
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 356
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_874
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_368
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_874: # %cond.load1425
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 358
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 357
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_875
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_369
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_875: # %cond.load1429
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 359
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 358
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_876
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_370
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_876: # %cond.load1433
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 360
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 359
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_877
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_371
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_877: # %cond.load1437
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 361
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 360
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_878
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_372
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_878: # %cond.load1441
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 362
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 361
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_879
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_373
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_879: # %cond.load1445
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 363
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 362
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_880
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_374
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_880: # %cond.load1449
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 364
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 363
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_881
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_375
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_881: # %cond.load1453
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 365
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 364
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_882
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_376
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_882: # %cond.load1457
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 366
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 365
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_883
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_377
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_883: # %cond.load1461
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 367
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 366
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_884
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_378
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_884: # %cond.load1465
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 368
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 367
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_885
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_379
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_885: # %cond.load1469
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 369
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 368
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_886
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_380
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_886: # %cond.load1473
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 370
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 369
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_887
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_381
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_887: # %cond.load1477
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 371
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 370
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_888
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_382
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_888: # %cond.load1481
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 372
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 371
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_889
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_383
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_889: # %cond.load1485
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 373
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 372
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_890
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_384
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_890: # %cond.load1489
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 374
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 373
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_891
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_385
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_891: # %cond.load1493
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 375
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 374
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_892
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_386
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_892: # %cond.load1497
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 376
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 375
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_893
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_387
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_893: # %cond.load1501
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 377
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 376
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_894
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_388
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_894: # %cond.load1505
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 378
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 377
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_895
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_389
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_895: # %cond.load1509
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 379
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 378
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_896
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_390
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_896: # %cond.load1513
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 380
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 379
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_897
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_391
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_897: # %cond.load1517
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 381
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 380
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a2, .LBB61_1030
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_392
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1030: # %cond.load1517
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_393
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_898: # %cond.load1529
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 384
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 383
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_899
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_397
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_899: # %cond.load1533
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 385
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 384
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_900
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_398
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_900: # %cond.load1537
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 386
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 385
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_901
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_399
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_901: # %cond.load1541
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 387
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 386
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_902
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_400
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_902: # %cond.load1545
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 388
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 387
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_903
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_401
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_903: # %cond.load1549
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 389
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 388
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_904
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_402
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_904: # %cond.load1553
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 390
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 389
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_905
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_403
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_905: # %cond.load1557
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 391
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 390
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_906
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_404
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_906: # %cond.load1561
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 392
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 391
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_907
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_405
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_907: # %cond.load1565
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 393
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 392
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_908
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_406
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_908: # %cond.load1569
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 394
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 393
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a1, .LBB61_909
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_407
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_909: # %cond.load1573
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 395
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 394
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_910
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_408
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_910: # %cond.load1577
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 396
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 395
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_911
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_409
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_911: # %cond.load1581
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 397
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 396
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_912
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_410
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_912: # %cond.load1585
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 398
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 397
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_913
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_411
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_913: # %cond.load1589
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 399
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 398
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_914
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_412
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_914: # %cond.load1593
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 400
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 399
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_915
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_413
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_915: # %cond.load1597
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 401
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 400
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_916
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_414
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_916: # %cond.load1601
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 402
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 401
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_917
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_415
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_917: # %cond.load1605
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 403
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 402
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_918
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_416
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_918: # %cond.load1609
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 404
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 403
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_919
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_417
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_919: # %cond.load1613
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 405
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 404
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_920
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_418
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_920: # %cond.load1617
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 406
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 405
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_921
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_419
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_921: # %cond.load1621
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 407
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 406
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_922
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_420
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_922: # %cond.load1625
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 408
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 407
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_923
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_421
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_923: # %cond.load1629
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 409
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 408
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_924
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_422
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_924: # %cond.load1633
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 410
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 409
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_925
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_423
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_925: # %cond.load1637
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 411
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 410
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_926
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_424
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_926: # %cond.load1641
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 412
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 411
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_927
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_425
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_927: # %cond.load1645
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 413
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 412
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_928
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_426
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_928: # %cond.load1649
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 414
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 413
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_929
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_427
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_929: # %cond.load1653
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 415
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 414
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_930
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_428
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_930: # %cond.load1657
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 416
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 415
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_931
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_429
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_931: # %cond.load1661
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 417
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 416
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_932
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_430
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_932: # %cond.load1665
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 418
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 417
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_933
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_431
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_933: # %cond.load1669
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 419
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 418
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_934
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_432
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_934: # %cond.load1673
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 420
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 419
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_935
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_433
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_935: # %cond.load1677
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 421
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 420
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_936
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_434
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_936: # %cond.load1681
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 422
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 421
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_937
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_435
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_937: # %cond.load1685
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 423
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 422
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_938
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_436
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_938: # %cond.load1689
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 424
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 423
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_939
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_437
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_939: # %cond.load1693
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 425
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 424
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_940
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_438
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_940: # %cond.load1697
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 426
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 425
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_941
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_439
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_941: # %cond.load1701
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 427
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 426
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_942
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_440
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_942: # %cond.load1705
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 428
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 427
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_943
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_441
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_943: # %cond.load1709
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 429
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 428
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_944
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_442
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_944: # %cond.load1713
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 430
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 429
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_945
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_443
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_945: # %cond.load1717
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 431
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 430
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_946
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_444
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_946: # %cond.load1721
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 432
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 431
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_947
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_445
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_947: # %cond.load1725
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 433
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 432
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_948
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_446
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_948: # %cond.load1729
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 434
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 433
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_949
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_447
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_949: # %cond.load1733
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 435
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 434
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_950
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_448
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_950: # %cond.load1737
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 436
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 435
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_951
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_449
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_951: # %cond.load1741
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 437
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 436
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_952
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_450
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_952: # %cond.load1745
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 438
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 437
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_953
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_451
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_953: # %cond.load1749
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 439
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 438
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_954
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_452
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_954: # %cond.load1753
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 440
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 439
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_955
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_453
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_955: # %cond.load1757
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 441
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 440
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_956
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_454
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_956: # %cond.load1761
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 442
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 441
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_957
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_455
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_957: # %cond.load1765
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 443
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 442
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_958
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_456
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_958: # %cond.load1769
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 444
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 443
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_959
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_457
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_959: # %cond.load1773
-; CHECK-VRGATHER-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 445
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 444
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a1, a2, 2
-; CHECK-VRGATHER-RV64-NEXT:    bgez a1, .LBB61_1031
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_458
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1031: # %cond.load1773
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_459
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_960: # %cond.load1785
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 448
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 447
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_961
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_463
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_961: # %cond.load1789
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 449
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 448
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_962
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_464
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_962: # %cond.load1793
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 450
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 449
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_963
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_465
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_963: # %cond.load1797
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 451
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 450
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_964
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_466
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_964: # %cond.load1801
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 452
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 451
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_965
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_467
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_965: # %cond.load1805
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 453
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 452
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_966
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_468
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_966: # %cond.load1809
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 454
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 453
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 64
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_967
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_469
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_967: # %cond.load1813
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 455
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 454
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 128
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_968
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_470
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_968: # %cond.load1817
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 456
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 455
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 256
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_969
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_471
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_969: # %cond.load1821
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 457
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 456
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_970
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_472
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_970: # %cond.load1825
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 458
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 457
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-VRGATHER-RV64-NEXT:    bnez a2, .LBB61_971
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_473
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_971: # %cond.load1829
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 459
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 458
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 52
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_972
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_474
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_972: # %cond.load1833
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 460
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 459
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 51
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_973
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_475
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_973: # %cond.load1837
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 461
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 460
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 50
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_974
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_476
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_974: # %cond.load1841
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 462
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 461
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 49
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_975
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_477
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_975: # %cond.load1845
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 463
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 462
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 48
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_976
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_478
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_976: # %cond.load1849
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 464
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 463
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 47
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_977
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_479
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_977: # %cond.load1853
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 465
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 464
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 46
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_978
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_480
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_978: # %cond.load1857
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 466
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 465
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 45
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_979
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_481
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_979: # %cond.load1861
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 467
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 466
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 44
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_980
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_482
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_980: # %cond.load1865
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 468
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 467
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 43
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_981
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_483
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_981: # %cond.load1869
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 469
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 468
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 42
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_982
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_484
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_982: # %cond.load1873
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 470
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 469
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 41
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_983
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_485
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_983: # %cond.load1877
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 471
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 470
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 40
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_984
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_486
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_984: # %cond.load1881
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 472
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 471
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 39
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_985
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_487
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_985: # %cond.load1885
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 473
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 472
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 38
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_986
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_488
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_986: # %cond.load1889
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 474
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 473
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 37
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_987
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_489
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_987: # %cond.load1893
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 475
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 474
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 36
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_988
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_490
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_988: # %cond.load1897
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 476
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 475
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 35
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_989
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_491
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_989: # %cond.load1901
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 477
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 476
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 34
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_990
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_492
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_990: # %cond.load1905
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 478
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 477
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 33
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_991
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_493
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_991: # %cond.load1909
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 479
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 478
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 32
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_992
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_494
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 480
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 479
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 31
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_993
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_495
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 481
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 480
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 30
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_994
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_496
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 482
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 481
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 29
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_995
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_497
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 483
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 482
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 28
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_996
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_498
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 484
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 483
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 27
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_997
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_499
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 485
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 484
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 26
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_998
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_500
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 486
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 485
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 25
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_999
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_501
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 487
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 486
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 24
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1000
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_502
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 488
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 487
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 23
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1001
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_503
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 489
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 488
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 22
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1002
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_504
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 490
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 489
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 21
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1003
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_505
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 491
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 490
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 20
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1004
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_506
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 492
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 491
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 19
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1005
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_507
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 493
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 492
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 18
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1006
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_508
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 494
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 493
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 17
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1007
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_509
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 495
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 494
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 16
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1008
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_510
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 496
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 495
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 15
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1009
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_511
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 497
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 496
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 14
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1010
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_512
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 498
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 497
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 13
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1011
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_513
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 499
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 498
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 12
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1012
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_514
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 500
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 499
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 11
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1013
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_515
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 501
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 500
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 10
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1014
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_516
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 502
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 501
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 9
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1015
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_517
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 503
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 502
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 8
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1016
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_518
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 504
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 503
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 7
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1017
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_519
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 505
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 504
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 6
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1018
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_520
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 506
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 505
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 5
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1019
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_521
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 507
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 506
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 4
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1020
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_522
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 508
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 507
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 3
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1021
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_523
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 509
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 508
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 2
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1022
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_524
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 510
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 509
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    slli a2, a1, 1
-; CHECK-VRGATHER-RV64-NEXT:    bltz a2, .LBB61_1023
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_525
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-VRGATHER-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-VRGATHER-RV64-NEXT:    li a2, 511
-; CHECK-VRGATHER-RV64-NEXT:    li a3, 510
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-VRGATHER-RV64-NEXT:    addi a0, a0, 1
-; CHECK-VRGATHER-RV64-NEXT:    bltz a1, .LBB61_1024
-; CHECK-VRGATHER-RV64-NEXT:    j .LBB61_526
-; CHECK-VRGATHER-RV64-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-VRGATHER-RV64-NEXT:    lbu a0, 0(a0)
-; CHECK-VRGATHER-RV64-NEXT:    li a1, 512
-; CHECK-VRGATHER-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-VRGATHER-RV64-NEXT:    vmv.s.x v16, a0
-; CHECK-VRGATHER-RV64-NEXT:    li a0, 511
-; CHECK-VRGATHER-RV64-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-VRGATHER-RV64-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v0
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_1
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_544
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1: # %else
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_2
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_545
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_2: # %else2
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_3
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_546
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_3: # %else6
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_4
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_547
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_4: # %else10
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_5
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_548
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_5: # %else14
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_6
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_549
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_6: # %else18
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_7
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_550
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_7: # %else22
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_8
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_551
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_8: # %else26
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_9
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_552
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_9: # %else30
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_10
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_553
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_10: # %else34
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a1, .LBB61_11
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_554
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_11: # %else38
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_12
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_555
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_12: # %else42
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_13
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_556
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_13: # %else46
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_14
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_557
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_14: # %else50
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_15
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_558
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_15: # %else54
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_16
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_559
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_16: # %else58
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_17
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_560
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_17: # %else62
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_18
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_561
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_18: # %else66
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_19
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_562
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_19: # %else70
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_20
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_563
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_20: # %else74
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_21
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_564
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_21: # %else78
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_22
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_565
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_22: # %else82
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_23
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_566
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_23: # %else86
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_24
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_567
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_24: # %else90
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_25
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_568
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_25: # %else94
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_26
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_569
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_26: # %else98
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_27
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_570
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_27: # %else102
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_28
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_571
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_28: # %else106
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_30
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_29: # %cond.load109
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_30: # %else110
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    li a1, 32
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_32
-; CHECK-INDEXED-RV32-NEXT:  # %bb.31: # %cond.load113
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_32: # %else114
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v0, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_34
-; CHECK-INDEXED-RV32-NEXT:  # %bb.33: # %cond.load117
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v17, 30
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_34: # %else118
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_35
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_572
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_35: # %else122
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_36
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_573
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_36: # %else126
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_37
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_574
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_37: # %else130
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_38
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_575
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_38: # %else134
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_39
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_576
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_39: # %else138
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_40
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_577
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_40: # %else142
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_41
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_578
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_41: # %else146
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_42
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_579
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_42: # %else150
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_43
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_580
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_43: # %else154
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_44
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_581
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_44: # %else158
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_45
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_582
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_45: # %else162
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_46
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_583
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_46: # %else166
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_47
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_584
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_47: # %else170
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_48
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_585
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_48: # %else174
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_49
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_586
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_49: # %else178
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_50
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_587
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_50: # %else182
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_51
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_588
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_51: # %else186
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_52
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_589
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_52: # %else190
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_53
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_590
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_53: # %else194
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_54
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_591
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_54: # %else198
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_55
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_592
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_55: # %else202
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_56
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_593
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_56: # %else206
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_57
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_594
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_57: # %else210
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_58
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_595
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_58: # %else214
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_59
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_596
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_59: # %else218
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_60
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_597
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_60: # %else222
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_61
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_598
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_61: # %else226
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_62
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_599
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_62: # %else230
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_63
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_600
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_63: # %else234
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_64
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_601
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_64: # %else238
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_66
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_65: # %cond.load241
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 62
-; CHECK-INDEXED-RV32-NEXT:    li a4, 61
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_66: # %else242
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_68
-; CHECK-INDEXED-RV32-NEXT:  # %bb.67: # %cond.load245
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 63
-; CHECK-INDEXED-RV32-NEXT:    li a4, 62
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_68: # %else246
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_69
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_602
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_69: # %else250
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_70
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_603
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_70: # %else254
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_71
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_604
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_71: # %else258
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_72
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_605
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_72: # %else262
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_73
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_606
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_73: # %else266
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_74
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_607
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_74: # %else270
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_75
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_608
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_75: # %else274
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_76
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_609
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_76: # %else278
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_77
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_610
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_77: # %else282
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_78
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_611
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_78: # %else286
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_79
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_612
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_79: # %else290
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_80
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_613
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_80: # %else294
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_81
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_614
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_81: # %else298
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_82
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_615
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_82: # %else302
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_83
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_616
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_83: # %else306
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_84
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_617
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_84: # %else310
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_85
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_618
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_85: # %else314
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_86
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_619
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_86: # %else318
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_87
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_620
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_87: # %else322
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_88
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_621
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_88: # %else326
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_89
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_622
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_89: # %else330
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_90
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_623
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_90: # %else334
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_91
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_624
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_91: # %else338
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_92
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_625
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_92: # %else342
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_93
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_626
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_93: # %else346
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_94
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_627
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_94: # %else350
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_95
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_628
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_95: # %else354
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_96
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_629
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_96: # %else358
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_97
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_630
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_97: # %else362
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_98
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_631
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_98: # %else366
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_100
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_99: # %cond.load369
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 94
-; CHECK-INDEXED-RV32-NEXT:    li a4, 93
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_100: # %else370
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_102
-; CHECK-INDEXED-RV32-NEXT:  # %bb.101: # %cond.load373
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 95
-; CHECK-INDEXED-RV32-NEXT:    li a4, 94
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_102: # %else374
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_103
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_632
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_103: # %else378
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_104
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_633
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_104: # %else382
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_105
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_634
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_105: # %else386
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_106
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_635
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_106: # %else390
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_107
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_636
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_107: # %else394
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_108
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_637
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_108: # %else398
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_109
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_638
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_109: # %else402
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_110
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_639
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_110: # %else406
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_111
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_640
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_111: # %else410
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_112
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_641
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_112: # %else414
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_113
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_642
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_113: # %else418
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_114
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_643
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_114: # %else422
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_115
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_644
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_115: # %else426
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_116
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_645
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_116: # %else430
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_117
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_646
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_117: # %else434
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_118
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_647
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_118: # %else438
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_119
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_648
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_119: # %else442
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_120
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_649
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_120: # %else446
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_121
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_650
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_121: # %else450
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_122
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_651
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_122: # %else454
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_123
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_652
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_123: # %else458
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_124
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_653
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_124: # %else462
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_125
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_654
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_125: # %else466
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_126
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_655
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_126: # %else470
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_127
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_656
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_127: # %else474
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_128
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_657
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_128: # %else478
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_129
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_658
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_129: # %else482
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_130
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_659
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_130: # %else486
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_131
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_660
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_131: # %else490
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_132
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_661
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_132: # %else494
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_134
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_133: # %cond.load497
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 126
-; CHECK-INDEXED-RV32-NEXT:    li a4, 125
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_134: # %else498
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_136
-; CHECK-INDEXED-RV32-NEXT:  # %bb.135: # %cond.load501
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 127
-; CHECK-INDEXED-RV32-NEXT:    li a4, 126
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_136: # %else502
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_137
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_662
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_137: # %else506
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_138
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_663
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_138: # %else510
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_139
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_664
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_139: # %else514
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_140
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_665
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_140: # %else518
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_141
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_666
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_141: # %else522
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_142
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_667
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_142: # %else526
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_143
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_668
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_143: # %else530
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_144
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_669
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_144: # %else534
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_145
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_670
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_145: # %else538
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_146
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_671
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_146: # %else542
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_147
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_672
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_147: # %else546
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_148
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_673
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_148: # %else550
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_149
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_674
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_149: # %else554
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_150
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_675
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_150: # %else558
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_151
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_676
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_151: # %else562
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_152
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_677
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_152: # %else566
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_153
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_678
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_153: # %else570
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_154
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_679
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_154: # %else574
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_155
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_680
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_155: # %else578
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_156
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_681
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_156: # %else582
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_157
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_682
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_157: # %else586
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_158
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_683
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_158: # %else590
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_159
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_684
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_159: # %else594
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_160
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_685
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_160: # %else598
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_161
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_686
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_161: # %else602
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_162
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_687
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_162: # %else606
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_163
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_688
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_163: # %else610
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_164
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_689
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_164: # %else614
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_165
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_690
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_165: # %else618
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_166
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_691
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_166: # %else622
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_168
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_167: # %cond.load625
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 158
-; CHECK-INDEXED-RV32-NEXT:    li a4, 157
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_168: # %else626
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_170
-; CHECK-INDEXED-RV32-NEXT:  # %bb.169: # %cond.load629
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 159
-; CHECK-INDEXED-RV32-NEXT:    li a4, 158
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_170: # %else630
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_171
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_692
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_171: # %else634
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_172
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_693
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_172: # %else638
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_173
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_694
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_173: # %else642
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_174
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_695
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_174: # %else646
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_175
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_696
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_175: # %else650
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_176
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_697
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_176: # %else654
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_177
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_698
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_177: # %else658
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_178
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_699
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_178: # %else662
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_179
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_700
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_179: # %else666
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_180
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_701
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_180: # %else670
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_181
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_702
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_181: # %else674
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_182
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_703
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_182: # %else678
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_183
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_704
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_183: # %else682
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_184
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_705
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_184: # %else686
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_185
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_706
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_185: # %else690
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_186
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_707
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_186: # %else694
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_187
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_708
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_187: # %else698
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_188
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_709
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_188: # %else702
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_189
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_710
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_189: # %else706
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_190
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_711
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_190: # %else710
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_191
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_712
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_191: # %else714
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_192
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_713
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_192: # %else718
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_193
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_714
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_193: # %else722
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_194
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_715
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_194: # %else726
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_195
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_716
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_195: # %else730
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_196
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_717
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_196: # %else734
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_197
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_718
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_197: # %else738
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_198
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_719
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_198: # %else742
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_199
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_720
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_199: # %else746
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_200
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_721
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_200: # %else750
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_202
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_201: # %cond.load753
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 190
-; CHECK-INDEXED-RV32-NEXT:    li a4, 189
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_202: # %else754
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_204
-; CHECK-INDEXED-RV32-NEXT:  # %bb.203: # %cond.load757
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 191
-; CHECK-INDEXED-RV32-NEXT:    li a4, 190
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_204: # %else758
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_205
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_722
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_205: # %else762
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_206
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_723
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_206: # %else766
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_207
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_724
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_207: # %else770
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_208
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_725
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_208: # %else774
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_209
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_726
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_209: # %else778
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_210
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_727
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_210: # %else782
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_211
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_728
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_211: # %else786
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_212
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_729
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_212: # %else790
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_213
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_730
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_213: # %else794
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_214
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_731
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_214: # %else798
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_215
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_732
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_215: # %else802
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_216
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_733
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_216: # %else806
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_217
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_734
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_217: # %else810
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_218
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_735
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_218: # %else814
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_219
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_736
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_219: # %else818
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_220
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_737
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_220: # %else822
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_221
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_738
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_221: # %else826
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_222
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_739
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_222: # %else830
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_223
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_740
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_223: # %else834
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_224
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_741
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_224: # %else838
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_225
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_742
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_225: # %else842
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_226
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_743
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_226: # %else846
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_227
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_744
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_227: # %else850
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_228
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_745
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_228: # %else854
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_229
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_746
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_229: # %else858
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_230
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_747
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_230: # %else862
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_231
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_748
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_231: # %else866
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_232
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_749
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_232: # %else870
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_233
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_750
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_233: # %else874
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_234
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_751
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_234: # %else878
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_236
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_235: # %cond.load881
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 222
-; CHECK-INDEXED-RV32-NEXT:    li a4, 221
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_236: # %else882
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_238
-; CHECK-INDEXED-RV32-NEXT:  # %bb.237: # %cond.load885
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 223
-; CHECK-INDEXED-RV32-NEXT:    li a4, 222
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_238: # %else886
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_239
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_752
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_239: # %else890
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_240
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_753
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_240: # %else894
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_241
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_754
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_241: # %else898
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_242
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_755
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_242: # %else902
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_243
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_756
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_243: # %else906
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_244
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_757
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_244: # %else910
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_245
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_758
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_245: # %else914
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_246
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_759
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_246: # %else918
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_247
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_760
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_247: # %else922
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_248
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_761
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_248: # %else926
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_249
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_762
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_249: # %else930
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_250
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_763
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_250: # %else934
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_251
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_764
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_251: # %else938
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_252
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_765
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_252: # %else942
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_253
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_766
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_253: # %else946
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_254
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_767
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_254: # %else950
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_255
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_768
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_255: # %else954
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_256
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_769
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_256: # %else958
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_257
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_770
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_257: # %else962
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_258
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_771
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_258: # %else966
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_259
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_772
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_259: # %else970
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_260
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_773
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_260: # %else974
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_261
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_774
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_261: # %else978
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_262
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_775
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_262: # %else982
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_263
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_776
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_263: # %else986
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_264
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_777
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_264: # %else990
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_265
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_778
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_265: # %else994
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_266
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_779
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_266: # %else998
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_267
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_780
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_267: # %else1002
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_268
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_781
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_268: # %else1006
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_270
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_269: # %cond.load1009
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 254
-; CHECK-INDEXED-RV32-NEXT:    li a4, 253
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_270: # %else1010
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_272
-; CHECK-INDEXED-RV32-NEXT:  # %bb.271: # %cond.load1013
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 255
-; CHECK-INDEXED-RV32-NEXT:    li a4, 254
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_272: # %else1014
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_273
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_782
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_273: # %else1018
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_274
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_783
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_274: # %else1022
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_275
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_784
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_275: # %else1026
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_276
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_785
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_276: # %else1030
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_277
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_786
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_277: # %else1034
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_278
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_787
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_278: # %else1038
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_279
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_788
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_279: # %else1042
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_280
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_789
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_280: # %else1046
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_281
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_790
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_281: # %else1050
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_282
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_791
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_282: # %else1054
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_283
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_792
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_283: # %else1058
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_284
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_793
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_284: # %else1062
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_285
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_794
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_285: # %else1066
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_286
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_795
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_286: # %else1070
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_287
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_796
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_287: # %else1074
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_288
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_797
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_288: # %else1078
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_289
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_798
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_289: # %else1082
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_290
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_799
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_290: # %else1086
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_291
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_800
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_291: # %else1090
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_292
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_801
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_292: # %else1094
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_293
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_802
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_293: # %else1098
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_294
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_803
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_294: # %else1102
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_295
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_804
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_295: # %else1106
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_296
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_805
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_296: # %else1110
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_297
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_806
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_297: # %else1114
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_298
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_807
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_298: # %else1118
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_299
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_808
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_299: # %else1122
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_300
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_809
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_300: # %else1126
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_301
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_810
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_301: # %else1130
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_302
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_811
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_302: # %else1134
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_304
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_303: # %cond.load1137
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 286
-; CHECK-INDEXED-RV32-NEXT:    li a4, 285
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_304: # %else1138
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_306
-; CHECK-INDEXED-RV32-NEXT:  # %bb.305: # %cond.load1141
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 287
-; CHECK-INDEXED-RV32-NEXT:    li a4, 286
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_306: # %else1142
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_307
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_812
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_307: # %else1146
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_308
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_813
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_308: # %else1150
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_309
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_814
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_309: # %else1154
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_310
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_815
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_310: # %else1158
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_311
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_816
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_311: # %else1162
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_312
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_817
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_312: # %else1166
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_313
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_818
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_313: # %else1170
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_314
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_819
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_314: # %else1174
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_315
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_820
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_315: # %else1178
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_316
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_821
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_316: # %else1182
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_317
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_822
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_317: # %else1186
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_318
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_823
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_318: # %else1190
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_319
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_824
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_319: # %else1194
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_320
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_825
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_320: # %else1198
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_321
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_826
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_321: # %else1202
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_322
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_827
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_322: # %else1206
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_323
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_828
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_323: # %else1210
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_324
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_829
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_324: # %else1214
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_325
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_830
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_325: # %else1218
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_326
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_831
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_326: # %else1222
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_327
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_832
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_327: # %else1226
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_328
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_833
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_328: # %else1230
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_329
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_834
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_329: # %else1234
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_330
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_835
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_330: # %else1238
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_331
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_836
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_331: # %else1242
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_332
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_837
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_332: # %else1246
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_333
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_838
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_333: # %else1250
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_334
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_839
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_334: # %else1254
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_335
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_840
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_335: # %else1258
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_336
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_841
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_336: # %else1262
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_338
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_337: # %cond.load1265
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 318
-; CHECK-INDEXED-RV32-NEXT:    li a4, 317
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_338: # %else1266
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_340
-; CHECK-INDEXED-RV32-NEXT:  # %bb.339: # %cond.load1269
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 319
-; CHECK-INDEXED-RV32-NEXT:    li a4, 318
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_340: # %else1270
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_341
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_842
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_341: # %else1274
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_342
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_843
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_342: # %else1278
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_343
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_844
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_343: # %else1282
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_344
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_845
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_344: # %else1286
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_345
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_846
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_345: # %else1290
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_346
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_847
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_346: # %else1294
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_347
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_848
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_347: # %else1298
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_348
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_849
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_348: # %else1302
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_349
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_850
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_349: # %else1306
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_350
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_851
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_350: # %else1310
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_351
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_852
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_351: # %else1314
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_352
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_853
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_352: # %else1318
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_353
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_854
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_353: # %else1322
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_354
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_855
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_354: # %else1326
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_355
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_856
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_355: # %else1330
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_356
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_857
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_356: # %else1334
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_357
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_858
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_357: # %else1338
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_358
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_859
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_358: # %else1342
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_359
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_860
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_359: # %else1346
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_360
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_861
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_360: # %else1350
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_361
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_862
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_361: # %else1354
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_362
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_863
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_362: # %else1358
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_363
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_864
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_363: # %else1362
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_364
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_865
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_364: # %else1366
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_365
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_866
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_365: # %else1370
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_366
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_867
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_366: # %else1374
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_367
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_868
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_367: # %else1378
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_368
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_869
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_368: # %else1382
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_369
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_870
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_369: # %else1386
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_370
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_871
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_370: # %else1390
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_372
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_371: # %cond.load1393
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 350
-; CHECK-INDEXED-RV32-NEXT:    li a4, 349
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_372: # %else1394
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_374
-; CHECK-INDEXED-RV32-NEXT:  # %bb.373: # %cond.load1397
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 351
-; CHECK-INDEXED-RV32-NEXT:    li a4, 350
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_374: # %else1398
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_375
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_872
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_375: # %else1402
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_376
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_873
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_376: # %else1406
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_377
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_874
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_377: # %else1410
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_378
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_875
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_378: # %else1414
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_379
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_876
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_379: # %else1418
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_380
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_877
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_380: # %else1422
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_381
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_878
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_381: # %else1426
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_382
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_879
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_382: # %else1430
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_383
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_880
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_383: # %else1434
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_384
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_881
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_384: # %else1438
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_385
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_882
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_385: # %else1442
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_386
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_883
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_386: # %else1446
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_387
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_884
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_387: # %else1450
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_388
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_885
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_388: # %else1454
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_389
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_886
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_389: # %else1458
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_390
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_887
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_390: # %else1462
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_391
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_888
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_391: # %else1466
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_392
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_889
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_392: # %else1470
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_393
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_890
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_393: # %else1474
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_394
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_891
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_394: # %else1478
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_395
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_892
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_395: # %else1482
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_396
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_893
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_396: # %else1486
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_397
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_894
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_397: # %else1490
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_398
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_895
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_398: # %else1494
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_399
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_896
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_399: # %else1498
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_400
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_897
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_400: # %else1502
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_401
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_898
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_401: # %else1506
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_402
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_899
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_402: # %else1510
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_403
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_900
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_403: # %else1514
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_404
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_901
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_404: # %else1518
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_406
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_405: # %cond.load1521
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 382
-; CHECK-INDEXED-RV32-NEXT:    li a4, 381
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_406: # %else1522
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_408
-; CHECK-INDEXED-RV32-NEXT:  # %bb.407: # %cond.load1525
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 383
-; CHECK-INDEXED-RV32-NEXT:    li a4, 382
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_408: # %else1526
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_409
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_902
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_409: # %else1530
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_410
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_903
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_410: # %else1534
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_411
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_904
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_411: # %else1538
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_412
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_905
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_412: # %else1542
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_413
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_906
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_413: # %else1546
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_414
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_907
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_414: # %else1550
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_415
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_908
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_415: # %else1554
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_416
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_909
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_416: # %else1558
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_417
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_910
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_417: # %else1562
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_418
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_911
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_418: # %else1566
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_419
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_912
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_419: # %else1570
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_420
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_913
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_420: # %else1574
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_421
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_914
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_421: # %else1578
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_422
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_915
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_422: # %else1582
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_423
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_916
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_423: # %else1586
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_424
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_917
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_424: # %else1590
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_425
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_918
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_425: # %else1594
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_426
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_919
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_426: # %else1598
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_427
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_920
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_427: # %else1602
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_428
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_921
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_428: # %else1606
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_429
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_922
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_429: # %else1610
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_430
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_923
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_430: # %else1614
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_431
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_924
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_431: # %else1618
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_432
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_925
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_432: # %else1622
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_433
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_926
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_433: # %else1626
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_434
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_927
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_434: # %else1630
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_435
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_928
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_435: # %else1634
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_436
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_929
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_436: # %else1638
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_437
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_930
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_437: # %else1642
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_438
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_931
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_438: # %else1646
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_440
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_439: # %cond.load1649
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 414
-; CHECK-INDEXED-RV32-NEXT:    li a4, 413
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_440: # %else1650
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_442
-; CHECK-INDEXED-RV32-NEXT:  # %bb.441: # %cond.load1653
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 415
-; CHECK-INDEXED-RV32-NEXT:    li a4, 414
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_442: # %else1654
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_443
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_932
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_443: # %else1658
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_444
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_933
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_444: # %else1662
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_445
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_934
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_445: # %else1666
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_446
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_935
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_446: # %else1670
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_447
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_936
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_447: # %else1674
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_448
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_937
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_448: # %else1678
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_449
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_938
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_449: # %else1682
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_450
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_939
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_450: # %else1686
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_451
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_940
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_451: # %else1690
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_452
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_941
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_452: # %else1694
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_453
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_942
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_453: # %else1698
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a3, .LBB61_454
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_943
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_454: # %else1702
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_455
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_944
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_455: # %else1706
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_456
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_945
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_456: # %else1710
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_457
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_946
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_457: # %else1714
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_458
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_947
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_458: # %else1718
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_459
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_948
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_459: # %else1722
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_460
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_949
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_460: # %else1726
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_461
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_950
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_461: # %else1730
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_462
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_951
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_462: # %else1734
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_463
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_952
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_463: # %else1738
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_464
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_953
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_464: # %else1742
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_465
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_954
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_465: # %else1746
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_466
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_955
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_466: # %else1750
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_467
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_956
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_467: # %else1754
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_468
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_957
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_468: # %else1758
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_469
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_958
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_469: # %else1762
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_470
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_959
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_470: # %else1766
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_471
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_960
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_471: # %else1770
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_472
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_961
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_472: # %else1774
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_474
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_473: # %cond.load1777
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 446
-; CHECK-INDEXED-RV32-NEXT:    li a4, 445
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_474: # %else1778
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_476
-; CHECK-INDEXED-RV32-NEXT:  # %bb.475: # %cond.load1781
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 447
-; CHECK-INDEXED-RV32-NEXT:    li a4, 446
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_476: # %else1782
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a3, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_477
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_962
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_477: # %else1786
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_478
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_963
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_478: # %else1790
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_479
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_964
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_479: # %else1794
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_480
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_965
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_480: # %else1798
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_481
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_966
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_481: # %else1802
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_482
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_967
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_482: # %else1806
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_483
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_968
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_483: # %else1810
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_484
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_969
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_484: # %else1814
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_485
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_970
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_485: # %else1818
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_486
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_971
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_486: # %else1822
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_487
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_972
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_487: # %else1826
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_488
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_973
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_488: # %else1830
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_489
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_974
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_489: # %else1834
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_490
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_975
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_490: # %else1838
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_491
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_976
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_491: # %else1842
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_492
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_977
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_492: # %else1846
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_493
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_978
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_493: # %else1850
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_494
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_979
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_494: # %else1854
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_495
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_980
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_495: # %else1858
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_496
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_981
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_496: # %else1862
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_497
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_982
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_497: # %else1866
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_498
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_983
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_498: # %else1870
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_499
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_984
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_499: # %else1874
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_500
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_985
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_500: # %else1878
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_501
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_986
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_501: # %else1882
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_502
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_987
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_502: # %else1886
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_503
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_988
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_503: # %else1890
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_504
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_989
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_504: # %else1894
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_505
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_990
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_505: # %else1898
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_506
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_991
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_506: # %else1902
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_508
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_507: # %cond.load1905
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 478
-; CHECK-INDEXED-RV32-NEXT:    li a4, 477
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_508: # %else1906
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vsrl.vx v16, v16, a1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_510
-; CHECK-INDEXED-RV32-NEXT:  # %bb.509: # %cond.load1909
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV32-NEXT:    li a1, 479
-; CHECK-INDEXED-RV32-NEXT:    li a2, 478
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a2
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_510: # %else1910
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_511
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_992
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_511: # %else1914
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_512
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_993
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_512: # %else1918
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_513
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_994
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_513: # %else1922
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_514
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_995
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_514: # %else1926
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_515
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_996
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_515: # %else1930
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_516
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_997
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_516: # %else1934
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_517
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_998
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_517: # %else1938
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_518
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_999
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_518: # %else1942
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_519
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1000
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_519: # %else1946
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_520
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1001
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_520: # %else1950
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_521
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1002
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_521: # %else1954
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV32-NEXT:    beqz a2, .LBB61_522
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1003
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_522: # %else1958
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_523
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1004
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_523: # %else1962
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_524
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1005
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_524: # %else1966
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_525
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1006
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_525: # %else1970
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_526
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1007
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_526: # %else1974
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_527
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1008
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_527: # %else1978
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_528
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1009
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_528: # %else1982
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_529
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1010
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_529: # %else1986
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_530
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1011
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_530: # %else1990
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_531
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1012
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_531: # %else1994
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_532
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1013
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_532: # %else1998
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_533
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1014
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_533: # %else2002
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_534
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1015
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_534: # %else2006
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_535
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1016
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_535: # %else2010
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_536
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1017
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_536: # %else2014
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_537
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1018
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_537: # %else2018
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_538
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1019
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_538: # %else2022
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_539
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1020
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_539: # %else2026
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_540
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1021
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_540: # %else2030
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_541
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1022
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_541: # %else2034
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_542
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1023
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_542: # %else2038
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_543
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_1024
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_543: # %else2042
-; CHECK-INDEXED-RV32-NEXT:    ret
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_544: # %cond.load
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v8, a1
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_545
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_2
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_545: # %cond.load1
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_546
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_3
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_546: # %cond.load5
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_547
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_4
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_547: # %cond.load9
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_548
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_5
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_548: # %cond.load13
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_549
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_6
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_549: # %cond.load17
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_550
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_7
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_550: # %cond.load21
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_551
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_8
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_551: # %cond.load25
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_552
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_9
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_552: # %cond.load29
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_553
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_10
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_553: # %cond.load33
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a1, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a1, .LBB61_554
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_11
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_554: # %cond.load37
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_555
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_12
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_555: # %cond.load41
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_556
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_13
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_556: # %cond.load45
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_557
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_14
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_557: # %cond.load49
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_558
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_15
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_558: # %cond.load53
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_559
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_16
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_559: # %cond.load57
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_560
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_17
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_560: # %cond.load61
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_561
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_18
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_561: # %cond.load65
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_562
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_19
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_562: # %cond.load69
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_563
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_20
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_563: # %cond.load73
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_564
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_21
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_564: # %cond.load77
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_565
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_22
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_565: # %cond.load81
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_566
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_23
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_566: # %cond.load85
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_567
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_24
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_567: # %cond.load89
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_568
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_25
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_568: # %cond.load93
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_569
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_26
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_569: # %cond.load97
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_570
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_27
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_570: # %cond.load101
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_571
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_28
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_571: # %cond.load105
-; CHECK-INDEXED-RV32-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a1, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bgez a1, .LBB61_1025
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_29
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1025: # %cond.load105
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_30
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_572: # %cond.load121
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_573
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_36
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_573: # %cond.load125
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 33
-; CHECK-INDEXED-RV32-NEXT:    li a4, 32
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_574
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_37
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_574: # %cond.load129
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 34
-; CHECK-INDEXED-RV32-NEXT:    li a4, 33
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_575
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_38
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_575: # %cond.load133
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 35
-; CHECK-INDEXED-RV32-NEXT:    li a4, 34
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_576
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_39
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_576: # %cond.load137
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 36
-; CHECK-INDEXED-RV32-NEXT:    li a4, 35
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_577
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_40
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_577: # %cond.load141
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 37
-; CHECK-INDEXED-RV32-NEXT:    li a4, 36
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_578
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_41
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_578: # %cond.load145
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 38
-; CHECK-INDEXED-RV32-NEXT:    li a4, 37
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_579
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_42
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_579: # %cond.load149
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 39
-; CHECK-INDEXED-RV32-NEXT:    li a4, 38
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_580
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_43
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_580: # %cond.load153
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 40
-; CHECK-INDEXED-RV32-NEXT:    li a4, 39
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_581
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_44
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_581: # %cond.load157
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 41
-; CHECK-INDEXED-RV32-NEXT:    li a4, 40
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_582
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_45
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_582: # %cond.load161
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 42
-; CHECK-INDEXED-RV32-NEXT:    li a4, 41
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_583
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_46
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_583: # %cond.load165
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 43
-; CHECK-INDEXED-RV32-NEXT:    li a4, 42
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_584
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_47
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_584: # %cond.load169
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 44
-; CHECK-INDEXED-RV32-NEXT:    li a4, 43
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_585
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_48
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_585: # %cond.load173
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 45
-; CHECK-INDEXED-RV32-NEXT:    li a4, 44
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_586
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_49
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_586: # %cond.load177
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 46
-; CHECK-INDEXED-RV32-NEXT:    li a4, 45
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_587
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_50
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_587: # %cond.load181
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 47
-; CHECK-INDEXED-RV32-NEXT:    li a4, 46
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_588
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_51
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_588: # %cond.load185
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 48
-; CHECK-INDEXED-RV32-NEXT:    li a4, 47
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_589
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_52
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_589: # %cond.load189
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 49
-; CHECK-INDEXED-RV32-NEXT:    li a4, 48
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_590
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_53
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_590: # %cond.load193
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 50
-; CHECK-INDEXED-RV32-NEXT:    li a4, 49
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_591
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_54
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_591: # %cond.load197
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 51
-; CHECK-INDEXED-RV32-NEXT:    li a4, 50
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_592
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_55
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_592: # %cond.load201
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 52
-; CHECK-INDEXED-RV32-NEXT:    li a4, 51
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_593
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_56
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_593: # %cond.load205
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 53
-; CHECK-INDEXED-RV32-NEXT:    li a4, 52
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_594
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_57
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_594: # %cond.load209
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 54
-; CHECK-INDEXED-RV32-NEXT:    li a4, 53
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_595
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_58
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_595: # %cond.load213
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 55
-; CHECK-INDEXED-RV32-NEXT:    li a4, 54
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_596
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_59
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_596: # %cond.load217
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 56
-; CHECK-INDEXED-RV32-NEXT:    li a4, 55
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_597
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_60
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_597: # %cond.load221
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 57
-; CHECK-INDEXED-RV32-NEXT:    li a4, 56
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_598
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_61
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_598: # %cond.load225
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 58
-; CHECK-INDEXED-RV32-NEXT:    li a4, 57
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_599
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_62
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_599: # %cond.load229
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 59
-; CHECK-INDEXED-RV32-NEXT:    li a4, 58
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_600
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_63
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_600: # %cond.load233
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 60
-; CHECK-INDEXED-RV32-NEXT:    li a4, 59
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_601
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_64
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_601: # %cond.load237
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 61
-; CHECK-INDEXED-RV32-NEXT:    li a4, 60
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1026
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_65
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1026: # %cond.load237
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_66
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_602: # %cond.load249
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v17, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 64
-; CHECK-INDEXED-RV32-NEXT:    li a4, 63
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v17, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_603
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_70
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_603: # %cond.load253
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 65
-; CHECK-INDEXED-RV32-NEXT:    li a4, 64
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_604
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_71
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_604: # %cond.load257
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 66
-; CHECK-INDEXED-RV32-NEXT:    li a4, 65
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_605
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_72
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_605: # %cond.load261
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 67
-; CHECK-INDEXED-RV32-NEXT:    li a4, 66
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_606
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_73
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_606: # %cond.load265
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 68
-; CHECK-INDEXED-RV32-NEXT:    li a4, 67
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_607
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_74
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_607: # %cond.load269
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 69
-; CHECK-INDEXED-RV32-NEXT:    li a4, 68
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_608
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_75
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_608: # %cond.load273
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 70
-; CHECK-INDEXED-RV32-NEXT:    li a4, 69
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_609
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_76
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_609: # %cond.load277
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 71
-; CHECK-INDEXED-RV32-NEXT:    li a4, 70
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_610
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_77
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_610: # %cond.load281
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 72
-; CHECK-INDEXED-RV32-NEXT:    li a4, 71
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_611
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_78
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_611: # %cond.load285
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 73
-; CHECK-INDEXED-RV32-NEXT:    li a4, 72
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_612
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_79
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_612: # %cond.load289
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 74
-; CHECK-INDEXED-RV32-NEXT:    li a4, 73
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_613
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_80
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_613: # %cond.load293
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 75
-; CHECK-INDEXED-RV32-NEXT:    li a4, 74
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_614
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_81
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_614: # %cond.load297
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 76
-; CHECK-INDEXED-RV32-NEXT:    li a4, 75
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_615
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_82
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_615: # %cond.load301
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 77
-; CHECK-INDEXED-RV32-NEXT:    li a4, 76
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_616
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_83
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_616: # %cond.load305
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 78
-; CHECK-INDEXED-RV32-NEXT:    li a4, 77
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_617
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_84
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_617: # %cond.load309
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 79
-; CHECK-INDEXED-RV32-NEXT:    li a4, 78
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_618
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_85
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_618: # %cond.load313
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 80
-; CHECK-INDEXED-RV32-NEXT:    li a4, 79
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_619
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_86
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_619: # %cond.load317
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 81
-; CHECK-INDEXED-RV32-NEXT:    li a4, 80
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_620
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_87
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_620: # %cond.load321
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 82
-; CHECK-INDEXED-RV32-NEXT:    li a4, 81
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_621
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_88
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_621: # %cond.load325
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 83
-; CHECK-INDEXED-RV32-NEXT:    li a4, 82
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_622
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_89
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_622: # %cond.load329
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 84
-; CHECK-INDEXED-RV32-NEXT:    li a4, 83
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_623
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_90
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_623: # %cond.load333
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 85
-; CHECK-INDEXED-RV32-NEXT:    li a4, 84
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_624
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_91
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_624: # %cond.load337
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 86
-; CHECK-INDEXED-RV32-NEXT:    li a4, 85
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_625
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_92
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_625: # %cond.load341
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 87
-; CHECK-INDEXED-RV32-NEXT:    li a4, 86
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_626
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_93
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_626: # %cond.load345
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 88
-; CHECK-INDEXED-RV32-NEXT:    li a4, 87
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_627
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_94
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_627: # %cond.load349
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 89
-; CHECK-INDEXED-RV32-NEXT:    li a4, 88
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_628
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_95
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_628: # %cond.load353
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 90
-; CHECK-INDEXED-RV32-NEXT:    li a4, 89
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_629
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_96
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_629: # %cond.load357
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 91
-; CHECK-INDEXED-RV32-NEXT:    li a4, 90
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_630
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_97
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_630: # %cond.load361
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 92
-; CHECK-INDEXED-RV32-NEXT:    li a4, 91
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_631
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_98
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_631: # %cond.load365
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 93
-; CHECK-INDEXED-RV32-NEXT:    li a4, 92
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1027
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_99
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1027: # %cond.load365
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_100
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_632: # %cond.load377
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 96
-; CHECK-INDEXED-RV32-NEXT:    li a4, 95
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_633
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_104
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_633: # %cond.load381
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 97
-; CHECK-INDEXED-RV32-NEXT:    li a4, 96
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_634
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_105
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_634: # %cond.load385
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 98
-; CHECK-INDEXED-RV32-NEXT:    li a4, 97
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_635
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_106
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_635: # %cond.load389
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 99
-; CHECK-INDEXED-RV32-NEXT:    li a4, 98
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_636
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_107
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_636: # %cond.load393
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 100
-; CHECK-INDEXED-RV32-NEXT:    li a4, 99
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_637
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_108
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_637: # %cond.load397
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 101
-; CHECK-INDEXED-RV32-NEXT:    li a4, 100
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_638
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_109
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_638: # %cond.load401
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 102
-; CHECK-INDEXED-RV32-NEXT:    li a4, 101
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_639
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_110
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_639: # %cond.load405
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 103
-; CHECK-INDEXED-RV32-NEXT:    li a4, 102
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_640
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_111
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_640: # %cond.load409
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 104
-; CHECK-INDEXED-RV32-NEXT:    li a4, 103
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_641
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_112
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_641: # %cond.load413
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 105
-; CHECK-INDEXED-RV32-NEXT:    li a4, 104
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_642
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_113
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_642: # %cond.load417
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 106
-; CHECK-INDEXED-RV32-NEXT:    li a4, 105
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_643
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_114
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_643: # %cond.load421
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 107
-; CHECK-INDEXED-RV32-NEXT:    li a4, 106
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_644
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_115
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_644: # %cond.load425
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 108
-; CHECK-INDEXED-RV32-NEXT:    li a4, 107
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_645
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_116
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_645: # %cond.load429
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 109
-; CHECK-INDEXED-RV32-NEXT:    li a4, 108
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_646
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_117
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_646: # %cond.load433
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 110
-; CHECK-INDEXED-RV32-NEXT:    li a4, 109
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_647
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_118
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_647: # %cond.load437
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 111
-; CHECK-INDEXED-RV32-NEXT:    li a4, 110
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_648
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_119
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_648: # %cond.load441
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 112
-; CHECK-INDEXED-RV32-NEXT:    li a4, 111
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_649
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_120
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_649: # %cond.load445
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 113
-; CHECK-INDEXED-RV32-NEXT:    li a4, 112
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_650
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_121
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_650: # %cond.load449
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 114
-; CHECK-INDEXED-RV32-NEXT:    li a4, 113
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_651
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_122
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_651: # %cond.load453
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 115
-; CHECK-INDEXED-RV32-NEXT:    li a4, 114
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_652
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_123
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_652: # %cond.load457
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 116
-; CHECK-INDEXED-RV32-NEXT:    li a4, 115
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_653
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_124
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_653: # %cond.load461
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 117
-; CHECK-INDEXED-RV32-NEXT:    li a4, 116
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_654
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_125
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_654: # %cond.load465
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 118
-; CHECK-INDEXED-RV32-NEXT:    li a4, 117
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_655
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_126
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_655: # %cond.load469
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 119
-; CHECK-INDEXED-RV32-NEXT:    li a4, 118
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_656
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_127
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_656: # %cond.load473
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 120
-; CHECK-INDEXED-RV32-NEXT:    li a4, 119
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_657
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_128
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_657: # %cond.load477
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 121
-; CHECK-INDEXED-RV32-NEXT:    li a4, 120
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_658
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_129
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_658: # %cond.load481
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 122
-; CHECK-INDEXED-RV32-NEXT:    li a4, 121
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_659
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_130
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_659: # %cond.load485
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 123
-; CHECK-INDEXED-RV32-NEXT:    li a4, 122
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_660
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_131
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_660: # %cond.load489
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 124
-; CHECK-INDEXED-RV32-NEXT:    li a4, 123
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_661
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_132
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_661: # %cond.load493
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 125
-; CHECK-INDEXED-RV32-NEXT:    li a4, 124
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1028
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_133
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1028: # %cond.load493
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_134
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_662: # %cond.load505
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 128
-; CHECK-INDEXED-RV32-NEXT:    li a4, 127
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v18, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_663
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_138
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_663: # %cond.load509
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 129
-; CHECK-INDEXED-RV32-NEXT:    li a4, 128
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_664
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_139
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_664: # %cond.load513
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 130
-; CHECK-INDEXED-RV32-NEXT:    li a4, 129
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_665
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_140
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_665: # %cond.load517
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 131
-; CHECK-INDEXED-RV32-NEXT:    li a4, 130
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_666
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_141
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_666: # %cond.load521
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 132
-; CHECK-INDEXED-RV32-NEXT:    li a4, 131
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_667
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_142
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_667: # %cond.load525
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 133
-; CHECK-INDEXED-RV32-NEXT:    li a4, 132
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_668
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_143
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_668: # %cond.load529
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 134
-; CHECK-INDEXED-RV32-NEXT:    li a4, 133
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_669
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_144
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_669: # %cond.load533
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 135
-; CHECK-INDEXED-RV32-NEXT:    li a4, 134
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_670
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_145
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_670: # %cond.load537
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 136
-; CHECK-INDEXED-RV32-NEXT:    li a4, 135
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_671
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_146
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_671: # %cond.load541
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 137
-; CHECK-INDEXED-RV32-NEXT:    li a4, 136
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_672
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_147
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_672: # %cond.load545
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 138
-; CHECK-INDEXED-RV32-NEXT:    li a4, 137
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_673
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_148
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_673: # %cond.load549
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 139
-; CHECK-INDEXED-RV32-NEXT:    li a4, 138
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_674
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_149
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_674: # %cond.load553
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 140
-; CHECK-INDEXED-RV32-NEXT:    li a4, 139
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_675
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_150
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_675: # %cond.load557
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 141
-; CHECK-INDEXED-RV32-NEXT:    li a4, 140
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_676
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_151
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_676: # %cond.load561
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 142
-; CHECK-INDEXED-RV32-NEXT:    li a4, 141
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_677
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_152
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_677: # %cond.load565
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 143
-; CHECK-INDEXED-RV32-NEXT:    li a4, 142
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_678
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_153
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_678: # %cond.load569
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 144
-; CHECK-INDEXED-RV32-NEXT:    li a4, 143
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_679
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_154
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_679: # %cond.load573
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 145
-; CHECK-INDEXED-RV32-NEXT:    li a4, 144
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_680
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_155
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_680: # %cond.load577
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 146
-; CHECK-INDEXED-RV32-NEXT:    li a4, 145
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_681
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_156
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_681: # %cond.load581
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 147
-; CHECK-INDEXED-RV32-NEXT:    li a4, 146
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_682
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_157
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_682: # %cond.load585
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 148
-; CHECK-INDEXED-RV32-NEXT:    li a4, 147
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_683
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_158
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_683: # %cond.load589
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 149
-; CHECK-INDEXED-RV32-NEXT:    li a4, 148
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_684
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_159
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_684: # %cond.load593
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 150
-; CHECK-INDEXED-RV32-NEXT:    li a4, 149
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_685
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_160
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_685: # %cond.load597
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 151
-; CHECK-INDEXED-RV32-NEXT:    li a4, 150
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_686
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_161
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_686: # %cond.load601
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 152
-; CHECK-INDEXED-RV32-NEXT:    li a4, 151
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_687
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_162
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_687: # %cond.load605
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 153
-; CHECK-INDEXED-RV32-NEXT:    li a4, 152
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_688
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_163
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_688: # %cond.load609
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 154
-; CHECK-INDEXED-RV32-NEXT:    li a4, 153
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_689
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_164
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_689: # %cond.load613
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 155
-; CHECK-INDEXED-RV32-NEXT:    li a4, 154
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_690
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_165
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_690: # %cond.load617
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 156
-; CHECK-INDEXED-RV32-NEXT:    li a4, 155
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_691
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_166
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_691: # %cond.load621
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 157
-; CHECK-INDEXED-RV32-NEXT:    li a4, 156
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1029
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_167
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1029: # %cond.load621
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_168
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_692: # %cond.load633
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 160
-; CHECK-INDEXED-RV32-NEXT:    li a4, 159
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_693
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_172
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_693: # %cond.load637
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 161
-; CHECK-INDEXED-RV32-NEXT:    li a4, 160
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_694
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_173
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_694: # %cond.load641
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 162
-; CHECK-INDEXED-RV32-NEXT:    li a4, 161
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_695
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_174
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_695: # %cond.load645
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 163
-; CHECK-INDEXED-RV32-NEXT:    li a4, 162
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_696
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_175
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_696: # %cond.load649
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 164
-; CHECK-INDEXED-RV32-NEXT:    li a4, 163
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_697
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_176
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_697: # %cond.load653
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 165
-; CHECK-INDEXED-RV32-NEXT:    li a4, 164
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_698
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_177
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_698: # %cond.load657
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 166
-; CHECK-INDEXED-RV32-NEXT:    li a4, 165
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_699
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_178
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_699: # %cond.load661
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 167
-; CHECK-INDEXED-RV32-NEXT:    li a4, 166
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_700
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_179
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_700: # %cond.load665
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 168
-; CHECK-INDEXED-RV32-NEXT:    li a4, 167
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_701
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_180
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_701: # %cond.load669
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 169
-; CHECK-INDEXED-RV32-NEXT:    li a4, 168
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_702
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_181
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_702: # %cond.load673
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 170
-; CHECK-INDEXED-RV32-NEXT:    li a4, 169
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_703
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_182
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_703: # %cond.load677
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 171
-; CHECK-INDEXED-RV32-NEXT:    li a4, 170
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_704
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_183
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_704: # %cond.load681
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 172
-; CHECK-INDEXED-RV32-NEXT:    li a4, 171
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_705
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_184
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_705: # %cond.load685
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 173
-; CHECK-INDEXED-RV32-NEXT:    li a4, 172
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_706
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_185
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_706: # %cond.load689
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 174
-; CHECK-INDEXED-RV32-NEXT:    li a4, 173
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_707
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_186
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_707: # %cond.load693
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 175
-; CHECK-INDEXED-RV32-NEXT:    li a4, 174
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_708
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_187
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_708: # %cond.load697
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 176
-; CHECK-INDEXED-RV32-NEXT:    li a4, 175
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_709
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_188
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_709: # %cond.load701
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 177
-; CHECK-INDEXED-RV32-NEXT:    li a4, 176
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_710
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_189
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_710: # %cond.load705
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 178
-; CHECK-INDEXED-RV32-NEXT:    li a4, 177
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_711
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_190
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_711: # %cond.load709
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 179
-; CHECK-INDEXED-RV32-NEXT:    li a4, 178
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_712
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_191
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_712: # %cond.load713
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 180
-; CHECK-INDEXED-RV32-NEXT:    li a4, 179
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_713
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_192
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_713: # %cond.load717
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 181
-; CHECK-INDEXED-RV32-NEXT:    li a4, 180
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_714
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_193
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_714: # %cond.load721
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 182
-; CHECK-INDEXED-RV32-NEXT:    li a4, 181
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_715
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_194
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_715: # %cond.load725
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 183
-; CHECK-INDEXED-RV32-NEXT:    li a4, 182
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_716
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_195
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_716: # %cond.load729
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 184
-; CHECK-INDEXED-RV32-NEXT:    li a4, 183
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_717
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_196
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_717: # %cond.load733
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 185
-; CHECK-INDEXED-RV32-NEXT:    li a4, 184
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_718
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_197
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_718: # %cond.load737
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 186
-; CHECK-INDEXED-RV32-NEXT:    li a4, 185
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_719
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_198
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_719: # %cond.load741
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 187
-; CHECK-INDEXED-RV32-NEXT:    li a4, 186
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_720
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_199
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_720: # %cond.load745
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 188
-; CHECK-INDEXED-RV32-NEXT:    li a4, 187
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_721
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_200
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_721: # %cond.load749
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 189
-; CHECK-INDEXED-RV32-NEXT:    li a4, 188
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1030
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_201
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1030: # %cond.load749
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_202
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_722: # %cond.load761
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 192
-; CHECK-INDEXED-RV32-NEXT:    li a4, 191
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_723
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_206
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_723: # %cond.load765
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 193
-; CHECK-INDEXED-RV32-NEXT:    li a4, 192
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_724
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_207
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_724: # %cond.load769
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 194
-; CHECK-INDEXED-RV32-NEXT:    li a4, 193
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_725
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_208
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_725: # %cond.load773
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 195
-; CHECK-INDEXED-RV32-NEXT:    li a4, 194
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_726
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_209
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_726: # %cond.load777
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 196
-; CHECK-INDEXED-RV32-NEXT:    li a4, 195
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_727
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_210
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_727: # %cond.load781
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 197
-; CHECK-INDEXED-RV32-NEXT:    li a4, 196
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_728
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_211
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_728: # %cond.load785
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 198
-; CHECK-INDEXED-RV32-NEXT:    li a4, 197
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_729
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_212
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_729: # %cond.load789
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 199
-; CHECK-INDEXED-RV32-NEXT:    li a4, 198
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_730
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_213
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_730: # %cond.load793
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 200
-; CHECK-INDEXED-RV32-NEXT:    li a4, 199
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_731
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_214
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_731: # %cond.load797
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 201
-; CHECK-INDEXED-RV32-NEXT:    li a4, 200
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_732
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_215
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_732: # %cond.load801
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 202
-; CHECK-INDEXED-RV32-NEXT:    li a4, 201
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_733
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_216
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_733: # %cond.load805
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 203
-; CHECK-INDEXED-RV32-NEXT:    li a4, 202
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_734
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_217
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_734: # %cond.load809
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 204
-; CHECK-INDEXED-RV32-NEXT:    li a4, 203
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_735
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_218
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_735: # %cond.load813
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 205
-; CHECK-INDEXED-RV32-NEXT:    li a4, 204
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_736
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_219
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_736: # %cond.load817
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 206
-; CHECK-INDEXED-RV32-NEXT:    li a4, 205
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_737
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_220
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_737: # %cond.load821
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 207
-; CHECK-INDEXED-RV32-NEXT:    li a4, 206
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_738
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_221
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_738: # %cond.load825
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 208
-; CHECK-INDEXED-RV32-NEXT:    li a4, 207
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_739
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_222
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_739: # %cond.load829
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 209
-; CHECK-INDEXED-RV32-NEXT:    li a4, 208
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_740
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_223
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_740: # %cond.load833
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 210
-; CHECK-INDEXED-RV32-NEXT:    li a4, 209
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_741
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_224
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_741: # %cond.load837
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 211
-; CHECK-INDEXED-RV32-NEXT:    li a4, 210
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_742
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_225
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_742: # %cond.load841
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 212
-; CHECK-INDEXED-RV32-NEXT:    li a4, 211
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_743
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_226
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_743: # %cond.load845
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 213
-; CHECK-INDEXED-RV32-NEXT:    li a4, 212
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_744
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_227
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_744: # %cond.load849
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 214
-; CHECK-INDEXED-RV32-NEXT:    li a4, 213
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_745
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_228
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_745: # %cond.load853
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 215
-; CHECK-INDEXED-RV32-NEXT:    li a4, 214
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_746
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_229
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_746: # %cond.load857
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 216
-; CHECK-INDEXED-RV32-NEXT:    li a4, 215
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_747
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_230
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_747: # %cond.load861
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 217
-; CHECK-INDEXED-RV32-NEXT:    li a4, 216
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_748
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_231
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_748: # %cond.load865
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 218
-; CHECK-INDEXED-RV32-NEXT:    li a4, 217
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_749
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_232
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_749: # %cond.load869
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 219
-; CHECK-INDEXED-RV32-NEXT:    li a4, 218
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_750
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_233
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_750: # %cond.load873
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 220
-; CHECK-INDEXED-RV32-NEXT:    li a4, 219
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_751
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_234
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_751: # %cond.load877
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 221
-; CHECK-INDEXED-RV32-NEXT:    li a4, 220
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1031
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_235
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1031: # %cond.load877
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_236
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_752: # %cond.load889
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 224
-; CHECK-INDEXED-RV32-NEXT:    li a4, 223
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_753
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_240
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_753: # %cond.load893
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 225
-; CHECK-INDEXED-RV32-NEXT:    li a4, 224
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_754
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_241
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_754: # %cond.load897
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 226
-; CHECK-INDEXED-RV32-NEXT:    li a4, 225
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_755
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_242
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_755: # %cond.load901
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 227
-; CHECK-INDEXED-RV32-NEXT:    li a4, 226
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_756
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_243
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_756: # %cond.load905
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 228
-; CHECK-INDEXED-RV32-NEXT:    li a4, 227
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_757
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_244
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_757: # %cond.load909
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 229
-; CHECK-INDEXED-RV32-NEXT:    li a4, 228
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_758
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_245
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_758: # %cond.load913
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 230
-; CHECK-INDEXED-RV32-NEXT:    li a4, 229
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_759
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_246
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_759: # %cond.load917
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 231
-; CHECK-INDEXED-RV32-NEXT:    li a4, 230
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_760
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_247
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_760: # %cond.load921
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 232
-; CHECK-INDEXED-RV32-NEXT:    li a4, 231
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_761
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_248
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_761: # %cond.load925
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 233
-; CHECK-INDEXED-RV32-NEXT:    li a4, 232
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_762
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_249
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_762: # %cond.load929
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 234
-; CHECK-INDEXED-RV32-NEXT:    li a4, 233
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_763
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_250
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_763: # %cond.load933
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 235
-; CHECK-INDEXED-RV32-NEXT:    li a4, 234
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_764
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_251
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_764: # %cond.load937
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 236
-; CHECK-INDEXED-RV32-NEXT:    li a4, 235
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_765
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_252
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_765: # %cond.load941
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 237
-; CHECK-INDEXED-RV32-NEXT:    li a4, 236
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_766
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_253
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_766: # %cond.load945
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 238
-; CHECK-INDEXED-RV32-NEXT:    li a4, 237
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_767
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_254
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_767: # %cond.load949
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 239
-; CHECK-INDEXED-RV32-NEXT:    li a4, 238
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_768
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_255
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_768: # %cond.load953
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 240
-; CHECK-INDEXED-RV32-NEXT:    li a4, 239
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_769
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_256
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_769: # %cond.load957
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 241
-; CHECK-INDEXED-RV32-NEXT:    li a4, 240
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_770
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_257
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_770: # %cond.load961
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 242
-; CHECK-INDEXED-RV32-NEXT:    li a4, 241
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_771
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_258
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_771: # %cond.load965
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 243
-; CHECK-INDEXED-RV32-NEXT:    li a4, 242
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_772
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_259
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_772: # %cond.load969
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 244
-; CHECK-INDEXED-RV32-NEXT:    li a4, 243
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_773
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_260
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_773: # %cond.load973
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 245
-; CHECK-INDEXED-RV32-NEXT:    li a4, 244
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_774
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_261
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_774: # %cond.load977
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 246
-; CHECK-INDEXED-RV32-NEXT:    li a4, 245
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_775
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_262
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_775: # %cond.load981
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 247
-; CHECK-INDEXED-RV32-NEXT:    li a4, 246
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_776
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_263
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_776: # %cond.load985
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 248
-; CHECK-INDEXED-RV32-NEXT:    li a4, 247
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_777
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_264
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_777: # %cond.load989
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 249
-; CHECK-INDEXED-RV32-NEXT:    li a4, 248
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_778
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_265
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_778: # %cond.load993
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 250
-; CHECK-INDEXED-RV32-NEXT:    li a4, 249
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_779
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_266
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_779: # %cond.load997
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 251
-; CHECK-INDEXED-RV32-NEXT:    li a4, 250
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_780
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_267
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_780: # %cond.load1001
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 252
-; CHECK-INDEXED-RV32-NEXT:    li a4, 251
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_781
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_268
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_781: # %cond.load1005
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a3, 253
-; CHECK-INDEXED-RV32-NEXT:    li a4, 252
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1032
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_269
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1032: # %cond.load1005
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_270
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_782: # %cond.load1017
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    li a2, 256
-; CHECK-INDEXED-RV32-NEXT:    li a4, 255
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v20, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV32-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_783
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_274
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_783: # %cond.load1021
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 257
-; CHECK-INDEXED-RV32-NEXT:    li a4, 256
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_784
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_275
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_784: # %cond.load1025
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 258
-; CHECK-INDEXED-RV32-NEXT:    li a4, 257
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_785
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_276
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_785: # %cond.load1029
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 259
-; CHECK-INDEXED-RV32-NEXT:    li a4, 258
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_786
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_277
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_786: # %cond.load1033
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 260
-; CHECK-INDEXED-RV32-NEXT:    li a4, 259
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_787
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_278
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_787: # %cond.load1037
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 261
-; CHECK-INDEXED-RV32-NEXT:    li a4, 260
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_788
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_279
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_788: # %cond.load1041
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 262
-; CHECK-INDEXED-RV32-NEXT:    li a4, 261
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_789
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_280
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_789: # %cond.load1045
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 263
-; CHECK-INDEXED-RV32-NEXT:    li a4, 262
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_790
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_281
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_790: # %cond.load1049
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 264
-; CHECK-INDEXED-RV32-NEXT:    li a4, 263
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_791
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_282
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_791: # %cond.load1053
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 265
-; CHECK-INDEXED-RV32-NEXT:    li a4, 264
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_792
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_283
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_792: # %cond.load1057
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 266
-; CHECK-INDEXED-RV32-NEXT:    li a4, 265
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_793
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_284
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_793: # %cond.load1061
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 267
-; CHECK-INDEXED-RV32-NEXT:    li a4, 266
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_794
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_285
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_794: # %cond.load1065
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 268
-; CHECK-INDEXED-RV32-NEXT:    li a4, 267
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_795
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_286
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_795: # %cond.load1069
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 269
-; CHECK-INDEXED-RV32-NEXT:    li a4, 268
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_796
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_287
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_796: # %cond.load1073
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 270
-; CHECK-INDEXED-RV32-NEXT:    li a4, 269
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_797
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_288
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_797: # %cond.load1077
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 271
-; CHECK-INDEXED-RV32-NEXT:    li a4, 270
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_798
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_289
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_798: # %cond.load1081
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 272
-; CHECK-INDEXED-RV32-NEXT:    li a4, 271
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_799
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_290
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_799: # %cond.load1085
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 273
-; CHECK-INDEXED-RV32-NEXT:    li a4, 272
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_800
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_291
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_800: # %cond.load1089
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 274
-; CHECK-INDEXED-RV32-NEXT:    li a4, 273
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_801
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_292
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_801: # %cond.load1093
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 275
-; CHECK-INDEXED-RV32-NEXT:    li a4, 274
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_802
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_293
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_802: # %cond.load1097
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 276
-; CHECK-INDEXED-RV32-NEXT:    li a4, 275
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_803
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_294
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_803: # %cond.load1101
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 277
-; CHECK-INDEXED-RV32-NEXT:    li a4, 276
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_804
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_295
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_804: # %cond.load1105
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 278
-; CHECK-INDEXED-RV32-NEXT:    li a4, 277
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_805
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_296
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_805: # %cond.load1109
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 279
-; CHECK-INDEXED-RV32-NEXT:    li a4, 278
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_806
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_297
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_806: # %cond.load1113
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 280
-; CHECK-INDEXED-RV32-NEXT:    li a4, 279
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_807
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_298
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_807: # %cond.load1117
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 281
-; CHECK-INDEXED-RV32-NEXT:    li a4, 280
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_808
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_299
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_808: # %cond.load1121
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 282
-; CHECK-INDEXED-RV32-NEXT:    li a4, 281
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_809
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_300
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_809: # %cond.load1125
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 283
-; CHECK-INDEXED-RV32-NEXT:    li a4, 282
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_810
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_301
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_810: # %cond.load1129
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 284
-; CHECK-INDEXED-RV32-NEXT:    li a4, 283
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_811
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_302
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_811: # %cond.load1133
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 285
-; CHECK-INDEXED-RV32-NEXT:    li a4, 284
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1033
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_303
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1033: # %cond.load1133
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_304
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_812: # %cond.load1145
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 288
-; CHECK-INDEXED-RV32-NEXT:    li a4, 287
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_813
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_308
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_813: # %cond.load1149
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 289
-; CHECK-INDEXED-RV32-NEXT:    li a4, 288
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_814
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_309
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_814: # %cond.load1153
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 290
-; CHECK-INDEXED-RV32-NEXT:    li a4, 289
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_815
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_310
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_815: # %cond.load1157
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 291
-; CHECK-INDEXED-RV32-NEXT:    li a4, 290
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_816
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_311
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_816: # %cond.load1161
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 292
-; CHECK-INDEXED-RV32-NEXT:    li a4, 291
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_817
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_312
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_817: # %cond.load1165
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 293
-; CHECK-INDEXED-RV32-NEXT:    li a4, 292
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_818
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_313
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_818: # %cond.load1169
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 294
-; CHECK-INDEXED-RV32-NEXT:    li a4, 293
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_819
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_314
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_819: # %cond.load1173
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 295
-; CHECK-INDEXED-RV32-NEXT:    li a4, 294
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_820
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_315
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_820: # %cond.load1177
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 296
-; CHECK-INDEXED-RV32-NEXT:    li a4, 295
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_821
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_316
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_821: # %cond.load1181
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 297
-; CHECK-INDEXED-RV32-NEXT:    li a4, 296
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_822
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_317
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_822: # %cond.load1185
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 298
-; CHECK-INDEXED-RV32-NEXT:    li a4, 297
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_823
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_318
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_823: # %cond.load1189
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 299
-; CHECK-INDEXED-RV32-NEXT:    li a4, 298
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_824
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_319
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_824: # %cond.load1193
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 300
-; CHECK-INDEXED-RV32-NEXT:    li a4, 299
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_825
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_320
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_825: # %cond.load1197
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 301
-; CHECK-INDEXED-RV32-NEXT:    li a4, 300
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_826
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_321
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_826: # %cond.load1201
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 302
-; CHECK-INDEXED-RV32-NEXT:    li a4, 301
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_827
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_322
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_827: # %cond.load1205
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 303
-; CHECK-INDEXED-RV32-NEXT:    li a4, 302
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_828
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_323
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_828: # %cond.load1209
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 304
-; CHECK-INDEXED-RV32-NEXT:    li a4, 303
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_829
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_324
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_829: # %cond.load1213
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 305
-; CHECK-INDEXED-RV32-NEXT:    li a4, 304
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_830
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_325
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_830: # %cond.load1217
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 306
-; CHECK-INDEXED-RV32-NEXT:    li a4, 305
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_831
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_326
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_831: # %cond.load1221
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 307
-; CHECK-INDEXED-RV32-NEXT:    li a4, 306
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_832
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_327
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_832: # %cond.load1225
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 308
-; CHECK-INDEXED-RV32-NEXT:    li a4, 307
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_833
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_328
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_833: # %cond.load1229
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 309
-; CHECK-INDEXED-RV32-NEXT:    li a4, 308
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_834
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_329
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_834: # %cond.load1233
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 310
-; CHECK-INDEXED-RV32-NEXT:    li a4, 309
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_835
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_330
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_835: # %cond.load1237
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 311
-; CHECK-INDEXED-RV32-NEXT:    li a4, 310
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_836
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_331
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_836: # %cond.load1241
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 312
-; CHECK-INDEXED-RV32-NEXT:    li a4, 311
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_837
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_332
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_837: # %cond.load1245
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 313
-; CHECK-INDEXED-RV32-NEXT:    li a4, 312
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_838
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_333
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_838: # %cond.load1249
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 314
-; CHECK-INDEXED-RV32-NEXT:    li a4, 313
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_839
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_334
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_839: # %cond.load1253
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 315
-; CHECK-INDEXED-RV32-NEXT:    li a4, 314
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_840
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_335
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_840: # %cond.load1257
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 316
-; CHECK-INDEXED-RV32-NEXT:    li a4, 315
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_841
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_336
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_841: # %cond.load1261
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 317
-; CHECK-INDEXED-RV32-NEXT:    li a4, 316
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1034
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_337
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1034: # %cond.load1261
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_338
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_842: # %cond.load1273
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 320
-; CHECK-INDEXED-RV32-NEXT:    li a4, 319
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_843
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_342
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_843: # %cond.load1277
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 321
-; CHECK-INDEXED-RV32-NEXT:    li a4, 320
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_844
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_343
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_844: # %cond.load1281
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 322
-; CHECK-INDEXED-RV32-NEXT:    li a4, 321
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_845
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_344
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_845: # %cond.load1285
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 323
-; CHECK-INDEXED-RV32-NEXT:    li a4, 322
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_846
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_345
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_846: # %cond.load1289
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 324
-; CHECK-INDEXED-RV32-NEXT:    li a4, 323
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_847
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_346
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_847: # %cond.load1293
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 325
-; CHECK-INDEXED-RV32-NEXT:    li a4, 324
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_848
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_347
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_848: # %cond.load1297
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 326
-; CHECK-INDEXED-RV32-NEXT:    li a4, 325
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_849
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_348
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_849: # %cond.load1301
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 327
-; CHECK-INDEXED-RV32-NEXT:    li a4, 326
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_850
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_349
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_850: # %cond.load1305
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 328
-; CHECK-INDEXED-RV32-NEXT:    li a4, 327
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_851
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_350
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_851: # %cond.load1309
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 329
-; CHECK-INDEXED-RV32-NEXT:    li a4, 328
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_852
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_351
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_852: # %cond.load1313
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 330
-; CHECK-INDEXED-RV32-NEXT:    li a4, 329
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_853
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_352
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_853: # %cond.load1317
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 331
-; CHECK-INDEXED-RV32-NEXT:    li a4, 330
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_854
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_353
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_854: # %cond.load1321
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 332
-; CHECK-INDEXED-RV32-NEXT:    li a4, 331
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_855
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_354
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_855: # %cond.load1325
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 333
-; CHECK-INDEXED-RV32-NEXT:    li a4, 332
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_856
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_355
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_856: # %cond.load1329
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 334
-; CHECK-INDEXED-RV32-NEXT:    li a4, 333
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_857
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_356
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_857: # %cond.load1333
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 335
-; CHECK-INDEXED-RV32-NEXT:    li a4, 334
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_858
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_357
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_858: # %cond.load1337
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 336
-; CHECK-INDEXED-RV32-NEXT:    li a4, 335
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_859
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_358
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_859: # %cond.load1341
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 337
-; CHECK-INDEXED-RV32-NEXT:    li a4, 336
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_860
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_359
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_860: # %cond.load1345
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 338
-; CHECK-INDEXED-RV32-NEXT:    li a4, 337
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_861
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_360
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_861: # %cond.load1349
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 339
-; CHECK-INDEXED-RV32-NEXT:    li a4, 338
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_862
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_361
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_862: # %cond.load1353
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 340
-; CHECK-INDEXED-RV32-NEXT:    li a4, 339
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_863
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_362
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_863: # %cond.load1357
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 341
-; CHECK-INDEXED-RV32-NEXT:    li a4, 340
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_864
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_363
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_864: # %cond.load1361
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 342
-; CHECK-INDEXED-RV32-NEXT:    li a4, 341
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_865
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_364
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_865: # %cond.load1365
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 343
-; CHECK-INDEXED-RV32-NEXT:    li a4, 342
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_866
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_365
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_866: # %cond.load1369
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 344
-; CHECK-INDEXED-RV32-NEXT:    li a4, 343
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_867
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_366
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_867: # %cond.load1373
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 345
-; CHECK-INDEXED-RV32-NEXT:    li a4, 344
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_868
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_367
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_868: # %cond.load1377
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 346
-; CHECK-INDEXED-RV32-NEXT:    li a4, 345
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_869
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_368
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_869: # %cond.load1381
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 347
-; CHECK-INDEXED-RV32-NEXT:    li a4, 346
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_870
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_369
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_870: # %cond.load1385
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 348
-; CHECK-INDEXED-RV32-NEXT:    li a4, 347
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_871
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_370
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_871: # %cond.load1389
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 349
-; CHECK-INDEXED-RV32-NEXT:    li a4, 348
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1035
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_371
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1035: # %cond.load1389
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_372
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_872: # %cond.load1401
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 352
-; CHECK-INDEXED-RV32-NEXT:    li a4, 351
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_873
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_376
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_873: # %cond.load1405
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 353
-; CHECK-INDEXED-RV32-NEXT:    li a4, 352
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_874
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_377
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_874: # %cond.load1409
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 354
-; CHECK-INDEXED-RV32-NEXT:    li a4, 353
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_875
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_378
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_875: # %cond.load1413
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 355
-; CHECK-INDEXED-RV32-NEXT:    li a4, 354
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_876
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_379
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_876: # %cond.load1417
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 356
-; CHECK-INDEXED-RV32-NEXT:    li a4, 355
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_877
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_380
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_877: # %cond.load1421
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 357
-; CHECK-INDEXED-RV32-NEXT:    li a4, 356
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_878
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_381
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_878: # %cond.load1425
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 358
-; CHECK-INDEXED-RV32-NEXT:    li a4, 357
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_879
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_382
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_879: # %cond.load1429
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 359
-; CHECK-INDEXED-RV32-NEXT:    li a4, 358
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_880
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_383
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_880: # %cond.load1433
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 360
-; CHECK-INDEXED-RV32-NEXT:    li a4, 359
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_881
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_384
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_881: # %cond.load1437
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 361
-; CHECK-INDEXED-RV32-NEXT:    li a4, 360
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_882
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_385
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_882: # %cond.load1441
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 362
-; CHECK-INDEXED-RV32-NEXT:    li a4, 361
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_883
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_386
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_883: # %cond.load1445
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 363
-; CHECK-INDEXED-RV32-NEXT:    li a4, 362
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_884
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_387
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_884: # %cond.load1449
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 364
-; CHECK-INDEXED-RV32-NEXT:    li a4, 363
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_885
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_388
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_885: # %cond.load1453
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 365
-; CHECK-INDEXED-RV32-NEXT:    li a4, 364
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_886
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_389
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_886: # %cond.load1457
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 366
-; CHECK-INDEXED-RV32-NEXT:    li a4, 365
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_887
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_390
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_887: # %cond.load1461
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 367
-; CHECK-INDEXED-RV32-NEXT:    li a4, 366
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_888
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_391
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_888: # %cond.load1465
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 368
-; CHECK-INDEXED-RV32-NEXT:    li a4, 367
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_889
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_392
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_889: # %cond.load1469
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 369
-; CHECK-INDEXED-RV32-NEXT:    li a4, 368
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_890
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_393
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_890: # %cond.load1473
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 370
-; CHECK-INDEXED-RV32-NEXT:    li a4, 369
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_891
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_394
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_891: # %cond.load1477
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 371
-; CHECK-INDEXED-RV32-NEXT:    li a4, 370
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_892
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_395
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_892: # %cond.load1481
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 372
-; CHECK-INDEXED-RV32-NEXT:    li a4, 371
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_893
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_396
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_893: # %cond.load1485
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 373
-; CHECK-INDEXED-RV32-NEXT:    li a4, 372
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_894
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_397
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_894: # %cond.load1489
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 374
-; CHECK-INDEXED-RV32-NEXT:    li a4, 373
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_895
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_398
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_895: # %cond.load1493
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 375
-; CHECK-INDEXED-RV32-NEXT:    li a4, 374
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_896
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_399
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_896: # %cond.load1497
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 376
-; CHECK-INDEXED-RV32-NEXT:    li a4, 375
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_897
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_400
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_897: # %cond.load1501
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 377
-; CHECK-INDEXED-RV32-NEXT:    li a4, 376
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_898
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_401
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_898: # %cond.load1505
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 378
-; CHECK-INDEXED-RV32-NEXT:    li a4, 377
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_899
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_402
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_899: # %cond.load1509
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 379
-; CHECK-INDEXED-RV32-NEXT:    li a4, 378
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_900
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_403
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_900: # %cond.load1513
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 380
-; CHECK-INDEXED-RV32-NEXT:    li a4, 379
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_901
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_404
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_901: # %cond.load1517
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 381
-; CHECK-INDEXED-RV32-NEXT:    li a4, 380
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1036
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_405
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1036: # %cond.load1517
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_406
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_902: # %cond.load1529
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 384
-; CHECK-INDEXED-RV32-NEXT:    li a4, 383
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_903
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_410
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_903: # %cond.load1533
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 385
-; CHECK-INDEXED-RV32-NEXT:    li a4, 384
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_904
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_411
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_904: # %cond.load1537
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 386
-; CHECK-INDEXED-RV32-NEXT:    li a4, 385
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_905
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_412
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_905: # %cond.load1541
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 387
-; CHECK-INDEXED-RV32-NEXT:    li a4, 386
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_906
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_413
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_906: # %cond.load1545
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 388
-; CHECK-INDEXED-RV32-NEXT:    li a4, 387
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_907
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_414
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_907: # %cond.load1549
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 389
-; CHECK-INDEXED-RV32-NEXT:    li a4, 388
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_908
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_415
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_908: # %cond.load1553
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 390
-; CHECK-INDEXED-RV32-NEXT:    li a4, 389
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_909
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_416
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_909: # %cond.load1557
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 391
-; CHECK-INDEXED-RV32-NEXT:    li a4, 390
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_910
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_417
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_910: # %cond.load1561
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 392
-; CHECK-INDEXED-RV32-NEXT:    li a4, 391
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_911
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_418
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_911: # %cond.load1565
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 393
-; CHECK-INDEXED-RV32-NEXT:    li a4, 392
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_912
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_419
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_912: # %cond.load1569
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 394
-; CHECK-INDEXED-RV32-NEXT:    li a4, 393
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_913
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_420
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_913: # %cond.load1573
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 395
-; CHECK-INDEXED-RV32-NEXT:    li a4, 394
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_914
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_421
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_914: # %cond.load1577
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 396
-; CHECK-INDEXED-RV32-NEXT:    li a4, 395
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_915
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_422
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_915: # %cond.load1581
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 397
-; CHECK-INDEXED-RV32-NEXT:    li a4, 396
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_916
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_423
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_916: # %cond.load1585
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 398
-; CHECK-INDEXED-RV32-NEXT:    li a4, 397
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_917
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_424
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_917: # %cond.load1589
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 399
-; CHECK-INDEXED-RV32-NEXT:    li a4, 398
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_918
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_425
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_918: # %cond.load1593
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 400
-; CHECK-INDEXED-RV32-NEXT:    li a4, 399
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_919
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_426
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_919: # %cond.load1597
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 401
-; CHECK-INDEXED-RV32-NEXT:    li a4, 400
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_920
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_427
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_920: # %cond.load1601
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 402
-; CHECK-INDEXED-RV32-NEXT:    li a4, 401
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_921
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_428
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_921: # %cond.load1605
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 403
-; CHECK-INDEXED-RV32-NEXT:    li a4, 402
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_922
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_429
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_922: # %cond.load1609
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 404
-; CHECK-INDEXED-RV32-NEXT:    li a4, 403
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_923
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_430
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_923: # %cond.load1613
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 405
-; CHECK-INDEXED-RV32-NEXT:    li a4, 404
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_924
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_431
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_924: # %cond.load1617
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 406
-; CHECK-INDEXED-RV32-NEXT:    li a4, 405
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_925
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_432
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_925: # %cond.load1621
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 407
-; CHECK-INDEXED-RV32-NEXT:    li a4, 406
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_926
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_433
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_926: # %cond.load1625
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 408
-; CHECK-INDEXED-RV32-NEXT:    li a4, 407
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_927
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_434
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_927: # %cond.load1629
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 409
-; CHECK-INDEXED-RV32-NEXT:    li a4, 408
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_928
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_435
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_928: # %cond.load1633
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 410
-; CHECK-INDEXED-RV32-NEXT:    li a4, 409
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_929
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_436
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_929: # %cond.load1637
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 411
-; CHECK-INDEXED-RV32-NEXT:    li a4, 410
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_930
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_437
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_930: # %cond.load1641
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 412
-; CHECK-INDEXED-RV32-NEXT:    li a4, 411
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_931
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_438
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_931: # %cond.load1645
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 413
-; CHECK-INDEXED-RV32-NEXT:    li a4, 412
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1037
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_439
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1037: # %cond.load1645
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_440
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_932: # %cond.load1657
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 416
-; CHECK-INDEXED-RV32-NEXT:    li a4, 415
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_933
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_444
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_933: # %cond.load1661
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 417
-; CHECK-INDEXED-RV32-NEXT:    li a4, 416
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_934
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_445
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_934: # %cond.load1665
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 418
-; CHECK-INDEXED-RV32-NEXT:    li a4, 417
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_935
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_446
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_935: # %cond.load1669
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 419
-; CHECK-INDEXED-RV32-NEXT:    li a4, 418
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_936
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_447
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_936: # %cond.load1673
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 420
-; CHECK-INDEXED-RV32-NEXT:    li a4, 419
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_937
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_448
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_937: # %cond.load1677
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 421
-; CHECK-INDEXED-RV32-NEXT:    li a4, 420
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_938
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_449
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_938: # %cond.load1681
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 422
-; CHECK-INDEXED-RV32-NEXT:    li a4, 421
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_939
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_450
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_939: # %cond.load1685
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 423
-; CHECK-INDEXED-RV32-NEXT:    li a4, 422
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_940
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_451
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_940: # %cond.load1689
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 424
-; CHECK-INDEXED-RV32-NEXT:    li a4, 423
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_941
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_452
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_941: # %cond.load1693
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 425
-; CHECK-INDEXED-RV32-NEXT:    li a4, 424
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_942
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_453
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_942: # %cond.load1697
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 426
-; CHECK-INDEXED-RV32-NEXT:    li a4, 425
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a3, a2, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a3, .LBB61_943
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_454
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_943: # %cond.load1701
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 427
-; CHECK-INDEXED-RV32-NEXT:    li a4, 426
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_944
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_455
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_944: # %cond.load1705
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 428
-; CHECK-INDEXED-RV32-NEXT:    li a4, 427
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_945
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_456
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_945: # %cond.load1709
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 429
-; CHECK-INDEXED-RV32-NEXT:    li a4, 428
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_946
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_457
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_946: # %cond.load1713
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 430
-; CHECK-INDEXED-RV32-NEXT:    li a4, 429
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_947
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_458
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_947: # %cond.load1717
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 431
-; CHECK-INDEXED-RV32-NEXT:    li a4, 430
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_948
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_459
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_948: # %cond.load1721
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 432
-; CHECK-INDEXED-RV32-NEXT:    li a4, 431
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_949
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_460
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_949: # %cond.load1725
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 433
-; CHECK-INDEXED-RV32-NEXT:    li a4, 432
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_950
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_461
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_950: # %cond.load1729
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 434
-; CHECK-INDEXED-RV32-NEXT:    li a4, 433
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_951
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_462
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_951: # %cond.load1733
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 435
-; CHECK-INDEXED-RV32-NEXT:    li a4, 434
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_952
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_463
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_952: # %cond.load1737
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 436
-; CHECK-INDEXED-RV32-NEXT:    li a4, 435
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_953
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_464
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_953: # %cond.load1741
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 437
-; CHECK-INDEXED-RV32-NEXT:    li a4, 436
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_954
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_465
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_954: # %cond.load1745
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 438
-; CHECK-INDEXED-RV32-NEXT:    li a4, 437
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_955
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_466
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_955: # %cond.load1749
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 439
-; CHECK-INDEXED-RV32-NEXT:    li a4, 438
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_956
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_467
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_956: # %cond.load1753
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 440
-; CHECK-INDEXED-RV32-NEXT:    li a4, 439
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_957
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_468
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_957: # %cond.load1757
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 441
-; CHECK-INDEXED-RV32-NEXT:    li a4, 440
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_958
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_469
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_958: # %cond.load1761
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 442
-; CHECK-INDEXED-RV32-NEXT:    li a4, 441
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_959
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_470
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_959: # %cond.load1765
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 443
-; CHECK-INDEXED-RV32-NEXT:    li a4, 442
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_960
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_471
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_960: # %cond.load1769
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 444
-; CHECK-INDEXED-RV32-NEXT:    li a4, 443
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a3, .LBB61_961
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_472
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_961: # %cond.load1773
-; CHECK-INDEXED-RV32-NEXT:    lbu a3, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a3
-; CHECK-INDEXED-RV32-NEXT:    li a3, 445
-; CHECK-INDEXED-RV32-NEXT:    li a4, 444
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a3, a2, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a3, .LBB61_1038
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_473
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1038: # %cond.load1773
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_474
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_962: # %cond.load1785
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 448
-; CHECK-INDEXED-RV32-NEXT:    li a4, 447
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_963
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_478
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_963: # %cond.load1789
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 449
-; CHECK-INDEXED-RV32-NEXT:    li a4, 448
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_964
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_479
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_964: # %cond.load1793
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 450
-; CHECK-INDEXED-RV32-NEXT:    li a4, 449
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_965
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_480
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_965: # %cond.load1797
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 451
-; CHECK-INDEXED-RV32-NEXT:    li a4, 450
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_966
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_481
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_966: # %cond.load1801
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 452
-; CHECK-INDEXED-RV32-NEXT:    li a4, 451
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_967
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_482
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_967: # %cond.load1805
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 453
-; CHECK-INDEXED-RV32-NEXT:    li a4, 452
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_968
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_483
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_968: # %cond.load1809
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 454
-; CHECK-INDEXED-RV32-NEXT:    li a4, 453
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_969
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_484
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_969: # %cond.load1813
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 455
-; CHECK-INDEXED-RV32-NEXT:    li a4, 454
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_970
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_485
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_970: # %cond.load1817
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 456
-; CHECK-INDEXED-RV32-NEXT:    li a4, 455
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_971
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_486
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_971: # %cond.load1821
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 457
-; CHECK-INDEXED-RV32-NEXT:    li a4, 456
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_972
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_487
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_972: # %cond.load1825
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 458
-; CHECK-INDEXED-RV32-NEXT:    li a4, 457
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a3, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_973
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_488
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_973: # %cond.load1829
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 459
-; CHECK-INDEXED-RV32-NEXT:    li a4, 458
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_974
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_489
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_974: # %cond.load1833
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 460
-; CHECK-INDEXED-RV32-NEXT:    li a4, 459
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_975
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_490
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_975: # %cond.load1837
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 461
-; CHECK-INDEXED-RV32-NEXT:    li a4, 460
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_976
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_491
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_976: # %cond.load1841
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 462
-; CHECK-INDEXED-RV32-NEXT:    li a4, 461
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_977
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_492
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_977: # %cond.load1845
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 463
-; CHECK-INDEXED-RV32-NEXT:    li a4, 462
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_978
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_493
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_978: # %cond.load1849
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 464
-; CHECK-INDEXED-RV32-NEXT:    li a4, 463
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_979
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_494
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_979: # %cond.load1853
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 465
-; CHECK-INDEXED-RV32-NEXT:    li a4, 464
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_980
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_495
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_980: # %cond.load1857
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 466
-; CHECK-INDEXED-RV32-NEXT:    li a4, 465
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_981
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_496
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_981: # %cond.load1861
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 467
-; CHECK-INDEXED-RV32-NEXT:    li a4, 466
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_982
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_497
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_982: # %cond.load1865
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 468
-; CHECK-INDEXED-RV32-NEXT:    li a4, 467
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_983
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_498
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_983: # %cond.load1869
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 469
-; CHECK-INDEXED-RV32-NEXT:    li a4, 468
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_984
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_499
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_984: # %cond.load1873
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 470
-; CHECK-INDEXED-RV32-NEXT:    li a4, 469
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_985
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_500
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_985: # %cond.load1877
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 471
-; CHECK-INDEXED-RV32-NEXT:    li a4, 470
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_986
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_501
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_986: # %cond.load1881
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 472
-; CHECK-INDEXED-RV32-NEXT:    li a4, 471
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_987
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_502
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_987: # %cond.load1885
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 473
-; CHECK-INDEXED-RV32-NEXT:    li a4, 472
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_988
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_503
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_988: # %cond.load1889
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 474
-; CHECK-INDEXED-RV32-NEXT:    li a4, 473
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_989
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_504
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_989: # %cond.load1893
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 475
-; CHECK-INDEXED-RV32-NEXT:    li a4, 474
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_990
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_505
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_990: # %cond.load1897
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 476
-; CHECK-INDEXED-RV32-NEXT:    li a4, 475
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_991
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_506
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_991: # %cond.load1901
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a4, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 477
-; CHECK-INDEXED-RV32-NEXT:    li a4, 476
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v24, a4
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a3, 2
-; CHECK-INDEXED-RV32-NEXT:    bgez a2, .LBB61_1039
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_507
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1039: # %cond.load1901
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_508
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 480
-; CHECK-INDEXED-RV32-NEXT:    li a3, 479
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_993
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_512
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 481
-; CHECK-INDEXED-RV32-NEXT:    li a3, 480
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_994
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_513
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 482
-; CHECK-INDEXED-RV32-NEXT:    li a3, 481
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_995
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_514
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 483
-; CHECK-INDEXED-RV32-NEXT:    li a3, 482
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_996
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_515
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 484
-; CHECK-INDEXED-RV32-NEXT:    li a3, 483
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_997
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_516
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 485
-; CHECK-INDEXED-RV32-NEXT:    li a3, 484
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_998
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_517
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 486
-; CHECK-INDEXED-RV32-NEXT:    li a3, 485
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_999
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_518
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 487
-; CHECK-INDEXED-RV32-NEXT:    li a3, 486
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1000
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_519
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 488
-; CHECK-INDEXED-RV32-NEXT:    li a3, 487
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1001
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_520
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 489
-; CHECK-INDEXED-RV32-NEXT:    li a3, 488
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1002
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_521
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 490
-; CHECK-INDEXED-RV32-NEXT:    li a3, 489
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV32-NEXT:    bnez a2, .LBB61_1003
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_522
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 491
-; CHECK-INDEXED-RV32-NEXT:    li a3, 490
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1004
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_523
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 492
-; CHECK-INDEXED-RV32-NEXT:    li a3, 491
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1005
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_524
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 493
-; CHECK-INDEXED-RV32-NEXT:    li a3, 492
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1006
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_525
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 494
-; CHECK-INDEXED-RV32-NEXT:    li a3, 493
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1007
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_526
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 495
-; CHECK-INDEXED-RV32-NEXT:    li a3, 494
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1008
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_527
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 496
-; CHECK-INDEXED-RV32-NEXT:    li a3, 495
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1009
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_528
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 497
-; CHECK-INDEXED-RV32-NEXT:    li a3, 496
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1010
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_529
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 498
-; CHECK-INDEXED-RV32-NEXT:    li a3, 497
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1011
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_530
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 499
-; CHECK-INDEXED-RV32-NEXT:    li a3, 498
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1012
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_531
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 500
-; CHECK-INDEXED-RV32-NEXT:    li a3, 499
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1013
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_532
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 501
-; CHECK-INDEXED-RV32-NEXT:    li a3, 500
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1014
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_533
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 502
-; CHECK-INDEXED-RV32-NEXT:    li a3, 501
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1015
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_534
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 503
-; CHECK-INDEXED-RV32-NEXT:    li a3, 502
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1016
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_535
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 504
-; CHECK-INDEXED-RV32-NEXT:    li a3, 503
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1017
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_536
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 505
-; CHECK-INDEXED-RV32-NEXT:    li a3, 504
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1018
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_537
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 506
-; CHECK-INDEXED-RV32-NEXT:    li a3, 505
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1019
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_538
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 507
-; CHECK-INDEXED-RV32-NEXT:    li a3, 506
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1020
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_539
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 508
-; CHECK-INDEXED-RV32-NEXT:    li a3, 507
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1021
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_540
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 509
-; CHECK-INDEXED-RV32-NEXT:    li a3, 508
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1022
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_541
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 510
-; CHECK-INDEXED-RV32-NEXT:    li a3, 509
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV32-NEXT:    bltz a2, .LBB61_1023
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_542
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-INDEXED-RV32-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a3, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV32-NEXT:    li a2, 511
-; CHECK-INDEXED-RV32-NEXT:    li a3, 510
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV32-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV32-NEXT:    bltz a1, .LBB61_1024
-; CHECK-INDEXED-RV32-NEXT:    j .LBB61_543
-; CHECK-INDEXED-RV32-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-INDEXED-RV32-NEXT:    lbu a0, 0(a0)
-; CHECK-INDEXED-RV32-NEXT:    li a1, 512
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    vmv.s.x v16, a0
-; CHECK-INDEXED-RV32-NEXT:    li a0, 511
-; CHECK-INDEXED-RV32-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-INDEXED-RV32-NEXT:    ret
+; CHECK-RV32-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-RV32:       # %bb.0:
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v0
+; CHECK-RV32-NEXT:    andi a1, a3, 1
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_1
+; CHECK-RV32-NEXT:    j .LBB61_544
+; CHECK-RV32-NEXT:  .LBB61_1: # %else
+; CHECK-RV32-NEXT:    andi a1, a3, 2
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_2
+; CHECK-RV32-NEXT:    j .LBB61_545
+; CHECK-RV32-NEXT:  .LBB61_2: # %else2
+; CHECK-RV32-NEXT:    andi a1, a3, 4
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_3
+; CHECK-RV32-NEXT:    j .LBB61_546
+; CHECK-RV32-NEXT:  .LBB61_3: # %else6
+; CHECK-RV32-NEXT:    andi a1, a3, 8
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_4
+; CHECK-RV32-NEXT:    j .LBB61_547
+; CHECK-RV32-NEXT:  .LBB61_4: # %else10
+; CHECK-RV32-NEXT:    andi a1, a3, 16
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_5
+; CHECK-RV32-NEXT:    j .LBB61_548
+; CHECK-RV32-NEXT:  .LBB61_5: # %else14
+; CHECK-RV32-NEXT:    andi a1, a3, 32
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_6
+; CHECK-RV32-NEXT:    j .LBB61_549
+; CHECK-RV32-NEXT:  .LBB61_6: # %else18
+; CHECK-RV32-NEXT:    andi a1, a3, 64
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_7
+; CHECK-RV32-NEXT:    j .LBB61_550
+; CHECK-RV32-NEXT:  .LBB61_7: # %else22
+; CHECK-RV32-NEXT:    andi a1, a3, 128
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_8
+; CHECK-RV32-NEXT:    j .LBB61_551
+; CHECK-RV32-NEXT:  .LBB61_8: # %else26
+; CHECK-RV32-NEXT:    andi a1, a3, 256
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_9
+; CHECK-RV32-NEXT:    j .LBB61_552
+; CHECK-RV32-NEXT:  .LBB61_9: # %else30
+; CHECK-RV32-NEXT:    andi a1, a3, 512
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_10
+; CHECK-RV32-NEXT:    j .LBB61_553
+; CHECK-RV32-NEXT:  .LBB61_10: # %else34
+; CHECK-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-RV32-NEXT:    beqz a1, .LBB61_11
+; CHECK-RV32-NEXT:    j .LBB61_554
+; CHECK-RV32-NEXT:  .LBB61_11: # %else38
+; CHECK-RV32-NEXT:    slli a1, a3, 20
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_12
+; CHECK-RV32-NEXT:    j .LBB61_555
+; CHECK-RV32-NEXT:  .LBB61_12: # %else42
+; CHECK-RV32-NEXT:    slli a1, a3, 19
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_13
+; CHECK-RV32-NEXT:    j .LBB61_556
+; CHECK-RV32-NEXT:  .LBB61_13: # %else46
+; CHECK-RV32-NEXT:    slli a1, a3, 18
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_14
+; CHECK-RV32-NEXT:    j .LBB61_557
+; CHECK-RV32-NEXT:  .LBB61_14: # %else50
+; CHECK-RV32-NEXT:    slli a1, a3, 17
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_15
+; CHECK-RV32-NEXT:    j .LBB61_558
+; CHECK-RV32-NEXT:  .LBB61_15: # %else54
+; CHECK-RV32-NEXT:    slli a1, a3, 16
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_16
+; CHECK-RV32-NEXT:    j .LBB61_559
+; CHECK-RV32-NEXT:  .LBB61_16: # %else58
+; CHECK-RV32-NEXT:    slli a1, a3, 15
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_17
+; CHECK-RV32-NEXT:    j .LBB61_560
+; CHECK-RV32-NEXT:  .LBB61_17: # %else62
+; CHECK-RV32-NEXT:    slli a1, a3, 14
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_18
+; CHECK-RV32-NEXT:    j .LBB61_561
+; CHECK-RV32-NEXT:  .LBB61_18: # %else66
+; CHECK-RV32-NEXT:    slli a1, a3, 13
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_19
+; CHECK-RV32-NEXT:    j .LBB61_562
+; CHECK-RV32-NEXT:  .LBB61_19: # %else70
+; CHECK-RV32-NEXT:    slli a1, a3, 12
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_20
+; CHECK-RV32-NEXT:    j .LBB61_563
+; CHECK-RV32-NEXT:  .LBB61_20: # %else74
+; CHECK-RV32-NEXT:    slli a1, a3, 11
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_21
+; CHECK-RV32-NEXT:    j .LBB61_564
+; CHECK-RV32-NEXT:  .LBB61_21: # %else78
+; CHECK-RV32-NEXT:    slli a1, a3, 10
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_22
+; CHECK-RV32-NEXT:    j .LBB61_565
+; CHECK-RV32-NEXT:  .LBB61_22: # %else82
+; CHECK-RV32-NEXT:    slli a1, a3, 9
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_23
+; CHECK-RV32-NEXT:    j .LBB61_566
+; CHECK-RV32-NEXT:  .LBB61_23: # %else86
+; CHECK-RV32-NEXT:    slli a1, a3, 8
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_24
+; CHECK-RV32-NEXT:    j .LBB61_567
+; CHECK-RV32-NEXT:  .LBB61_24: # %else90
+; CHECK-RV32-NEXT:    slli a1, a3, 7
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_25
+; CHECK-RV32-NEXT:    j .LBB61_568
+; CHECK-RV32-NEXT:  .LBB61_25: # %else94
+; CHECK-RV32-NEXT:    slli a1, a3, 6
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_26
+; CHECK-RV32-NEXT:    j .LBB61_569
+; CHECK-RV32-NEXT:  .LBB61_26: # %else98
+; CHECK-RV32-NEXT:    slli a1, a3, 5
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_27
+; CHECK-RV32-NEXT:    j .LBB61_570
+; CHECK-RV32-NEXT:  .LBB61_27: # %else102
+; CHECK-RV32-NEXT:    slli a1, a3, 4
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_28
+; CHECK-RV32-NEXT:    j .LBB61_571
+; CHECK-RV32-NEXT:  .LBB61_28: # %else106
+; CHECK-RV32-NEXT:    slli a1, a3, 3
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_30
+; CHECK-RV32-NEXT:  .LBB61_29: # %cond.load109
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_30: # %else110
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    li a1, 32
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_32
+; CHECK-RV32-NEXT:  # %bb.31: # %cond.load113
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_32: # %else114
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v0, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_34
+; CHECK-RV32-NEXT:  # %bb.33: # %cond.load117
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v17, 30
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_34: # %else118
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_35
+; CHECK-RV32-NEXT:    j .LBB61_572
+; CHECK-RV32-NEXT:  .LBB61_35: # %else122
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_36
+; CHECK-RV32-NEXT:    j .LBB61_573
+; CHECK-RV32-NEXT:  .LBB61_36: # %else126
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_37
+; CHECK-RV32-NEXT:    j .LBB61_574
+; CHECK-RV32-NEXT:  .LBB61_37: # %else130
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_38
+; CHECK-RV32-NEXT:    j .LBB61_575
+; CHECK-RV32-NEXT:  .LBB61_38: # %else134
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_39
+; CHECK-RV32-NEXT:    j .LBB61_576
+; CHECK-RV32-NEXT:  .LBB61_39: # %else138
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_40
+; CHECK-RV32-NEXT:    j .LBB61_577
+; CHECK-RV32-NEXT:  .LBB61_40: # %else142
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_41
+; CHECK-RV32-NEXT:    j .LBB61_578
+; CHECK-RV32-NEXT:  .LBB61_41: # %else146
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_42
+; CHECK-RV32-NEXT:    j .LBB61_579
+; CHECK-RV32-NEXT:  .LBB61_42: # %else150
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_43
+; CHECK-RV32-NEXT:    j .LBB61_580
+; CHECK-RV32-NEXT:  .LBB61_43: # %else154
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_44
+; CHECK-RV32-NEXT:    j .LBB61_581
+; CHECK-RV32-NEXT:  .LBB61_44: # %else158
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_45
+; CHECK-RV32-NEXT:    j .LBB61_582
+; CHECK-RV32-NEXT:  .LBB61_45: # %else162
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_46
+; CHECK-RV32-NEXT:    j .LBB61_583
+; CHECK-RV32-NEXT:  .LBB61_46: # %else166
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_47
+; CHECK-RV32-NEXT:    j .LBB61_584
+; CHECK-RV32-NEXT:  .LBB61_47: # %else170
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_48
+; CHECK-RV32-NEXT:    j .LBB61_585
+; CHECK-RV32-NEXT:  .LBB61_48: # %else174
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_49
+; CHECK-RV32-NEXT:    j .LBB61_586
+; CHECK-RV32-NEXT:  .LBB61_49: # %else178
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_50
+; CHECK-RV32-NEXT:    j .LBB61_587
+; CHECK-RV32-NEXT:  .LBB61_50: # %else182
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_51
+; CHECK-RV32-NEXT:    j .LBB61_588
+; CHECK-RV32-NEXT:  .LBB61_51: # %else186
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_52
+; CHECK-RV32-NEXT:    j .LBB61_589
+; CHECK-RV32-NEXT:  .LBB61_52: # %else190
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_53
+; CHECK-RV32-NEXT:    j .LBB61_590
+; CHECK-RV32-NEXT:  .LBB61_53: # %else194
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_54
+; CHECK-RV32-NEXT:    j .LBB61_591
+; CHECK-RV32-NEXT:  .LBB61_54: # %else198
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_55
+; CHECK-RV32-NEXT:    j .LBB61_592
+; CHECK-RV32-NEXT:  .LBB61_55: # %else202
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_56
+; CHECK-RV32-NEXT:    j .LBB61_593
+; CHECK-RV32-NEXT:  .LBB61_56: # %else206
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_57
+; CHECK-RV32-NEXT:    j .LBB61_594
+; CHECK-RV32-NEXT:  .LBB61_57: # %else210
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_58
+; CHECK-RV32-NEXT:    j .LBB61_595
+; CHECK-RV32-NEXT:  .LBB61_58: # %else214
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_59
+; CHECK-RV32-NEXT:    j .LBB61_596
+; CHECK-RV32-NEXT:  .LBB61_59: # %else218
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_60
+; CHECK-RV32-NEXT:    j .LBB61_597
+; CHECK-RV32-NEXT:  .LBB61_60: # %else222
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_61
+; CHECK-RV32-NEXT:    j .LBB61_598
+; CHECK-RV32-NEXT:  .LBB61_61: # %else226
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_62
+; CHECK-RV32-NEXT:    j .LBB61_599
+; CHECK-RV32-NEXT:  .LBB61_62: # %else230
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_63
+; CHECK-RV32-NEXT:    j .LBB61_600
+; CHECK-RV32-NEXT:  .LBB61_63: # %else234
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_64
+; CHECK-RV32-NEXT:    j .LBB61_601
+; CHECK-RV32-NEXT:  .LBB61_64: # %else238
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_66
+; CHECK-RV32-NEXT:  .LBB61_65: # %cond.load241
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 62
+; CHECK-RV32-NEXT:    li a4, 61
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:  .LBB61_66: # %else242
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_68
+; CHECK-RV32-NEXT:  # %bb.67: # %cond.load245
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 63
+; CHECK-RV32-NEXT:    li a4, 62
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_68: # %else246
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_69
+; CHECK-RV32-NEXT:    j .LBB61_602
+; CHECK-RV32-NEXT:  .LBB61_69: # %else250
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_70
+; CHECK-RV32-NEXT:    j .LBB61_603
+; CHECK-RV32-NEXT:  .LBB61_70: # %else254
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_71
+; CHECK-RV32-NEXT:    j .LBB61_604
+; CHECK-RV32-NEXT:  .LBB61_71: # %else258
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_72
+; CHECK-RV32-NEXT:    j .LBB61_605
+; CHECK-RV32-NEXT:  .LBB61_72: # %else262
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_73
+; CHECK-RV32-NEXT:    j .LBB61_606
+; CHECK-RV32-NEXT:  .LBB61_73: # %else266
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_74
+; CHECK-RV32-NEXT:    j .LBB61_607
+; CHECK-RV32-NEXT:  .LBB61_74: # %else270
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_75
+; CHECK-RV32-NEXT:    j .LBB61_608
+; CHECK-RV32-NEXT:  .LBB61_75: # %else274
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_76
+; CHECK-RV32-NEXT:    j .LBB61_609
+; CHECK-RV32-NEXT:  .LBB61_76: # %else278
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_77
+; CHECK-RV32-NEXT:    j .LBB61_610
+; CHECK-RV32-NEXT:  .LBB61_77: # %else282
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_78
+; CHECK-RV32-NEXT:    j .LBB61_611
+; CHECK-RV32-NEXT:  .LBB61_78: # %else286
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_79
+; CHECK-RV32-NEXT:    j .LBB61_612
+; CHECK-RV32-NEXT:  .LBB61_79: # %else290
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_80
+; CHECK-RV32-NEXT:    j .LBB61_613
+; CHECK-RV32-NEXT:  .LBB61_80: # %else294
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_81
+; CHECK-RV32-NEXT:    j .LBB61_614
+; CHECK-RV32-NEXT:  .LBB61_81: # %else298
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_82
+; CHECK-RV32-NEXT:    j .LBB61_615
+; CHECK-RV32-NEXT:  .LBB61_82: # %else302
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_83
+; CHECK-RV32-NEXT:    j .LBB61_616
+; CHECK-RV32-NEXT:  .LBB61_83: # %else306
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_84
+; CHECK-RV32-NEXT:    j .LBB61_617
+; CHECK-RV32-NEXT:  .LBB61_84: # %else310
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_85
+; CHECK-RV32-NEXT:    j .LBB61_618
+; CHECK-RV32-NEXT:  .LBB61_85: # %else314
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_86
+; CHECK-RV32-NEXT:    j .LBB61_619
+; CHECK-RV32-NEXT:  .LBB61_86: # %else318
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_87
+; CHECK-RV32-NEXT:    j .LBB61_620
+; CHECK-RV32-NEXT:  .LBB61_87: # %else322
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_88
+; CHECK-RV32-NEXT:    j .LBB61_621
+; CHECK-RV32-NEXT:  .LBB61_88: # %else326
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_89
+; CHECK-RV32-NEXT:    j .LBB61_622
+; CHECK-RV32-NEXT:  .LBB61_89: # %else330
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_90
+; CHECK-RV32-NEXT:    j .LBB61_623
+; CHECK-RV32-NEXT:  .LBB61_90: # %else334
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_91
+; CHECK-RV32-NEXT:    j .LBB61_624
+; CHECK-RV32-NEXT:  .LBB61_91: # %else338
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_92
+; CHECK-RV32-NEXT:    j .LBB61_625
+; CHECK-RV32-NEXT:  .LBB61_92: # %else342
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_93
+; CHECK-RV32-NEXT:    j .LBB61_626
+; CHECK-RV32-NEXT:  .LBB61_93: # %else346
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_94
+; CHECK-RV32-NEXT:    j .LBB61_627
+; CHECK-RV32-NEXT:  .LBB61_94: # %else350
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_95
+; CHECK-RV32-NEXT:    j .LBB61_628
+; CHECK-RV32-NEXT:  .LBB61_95: # %else354
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_96
+; CHECK-RV32-NEXT:    j .LBB61_629
+; CHECK-RV32-NEXT:  .LBB61_96: # %else358
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_97
+; CHECK-RV32-NEXT:    j .LBB61_630
+; CHECK-RV32-NEXT:  .LBB61_97: # %else362
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_98
+; CHECK-RV32-NEXT:    j .LBB61_631
+; CHECK-RV32-NEXT:  .LBB61_98: # %else366
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_100
+; CHECK-RV32-NEXT:  .LBB61_99: # %cond.load369
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 94
+; CHECK-RV32-NEXT:    li a4, 93
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_100: # %else370
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_102
+; CHECK-RV32-NEXT:  # %bb.101: # %cond.load373
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 95
+; CHECK-RV32-NEXT:    li a4, 94
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_102: # %else374
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_103
+; CHECK-RV32-NEXT:    j .LBB61_632
+; CHECK-RV32-NEXT:  .LBB61_103: # %else378
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_104
+; CHECK-RV32-NEXT:    j .LBB61_633
+; CHECK-RV32-NEXT:  .LBB61_104: # %else382
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_105
+; CHECK-RV32-NEXT:    j .LBB61_634
+; CHECK-RV32-NEXT:  .LBB61_105: # %else386
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_106
+; CHECK-RV32-NEXT:    j .LBB61_635
+; CHECK-RV32-NEXT:  .LBB61_106: # %else390
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_107
+; CHECK-RV32-NEXT:    j .LBB61_636
+; CHECK-RV32-NEXT:  .LBB61_107: # %else394
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_108
+; CHECK-RV32-NEXT:    j .LBB61_637
+; CHECK-RV32-NEXT:  .LBB61_108: # %else398
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_109
+; CHECK-RV32-NEXT:    j .LBB61_638
+; CHECK-RV32-NEXT:  .LBB61_109: # %else402
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_110
+; CHECK-RV32-NEXT:    j .LBB61_639
+; CHECK-RV32-NEXT:  .LBB61_110: # %else406
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_111
+; CHECK-RV32-NEXT:    j .LBB61_640
+; CHECK-RV32-NEXT:  .LBB61_111: # %else410
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_112
+; CHECK-RV32-NEXT:    j .LBB61_641
+; CHECK-RV32-NEXT:  .LBB61_112: # %else414
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_113
+; CHECK-RV32-NEXT:    j .LBB61_642
+; CHECK-RV32-NEXT:  .LBB61_113: # %else418
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_114
+; CHECK-RV32-NEXT:    j .LBB61_643
+; CHECK-RV32-NEXT:  .LBB61_114: # %else422
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_115
+; CHECK-RV32-NEXT:    j .LBB61_644
+; CHECK-RV32-NEXT:  .LBB61_115: # %else426
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_116
+; CHECK-RV32-NEXT:    j .LBB61_645
+; CHECK-RV32-NEXT:  .LBB61_116: # %else430
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_117
+; CHECK-RV32-NEXT:    j .LBB61_646
+; CHECK-RV32-NEXT:  .LBB61_117: # %else434
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_118
+; CHECK-RV32-NEXT:    j .LBB61_647
+; CHECK-RV32-NEXT:  .LBB61_118: # %else438
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_119
+; CHECK-RV32-NEXT:    j .LBB61_648
+; CHECK-RV32-NEXT:  .LBB61_119: # %else442
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_120
+; CHECK-RV32-NEXT:    j .LBB61_649
+; CHECK-RV32-NEXT:  .LBB61_120: # %else446
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_121
+; CHECK-RV32-NEXT:    j .LBB61_650
+; CHECK-RV32-NEXT:  .LBB61_121: # %else450
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_122
+; CHECK-RV32-NEXT:    j .LBB61_651
+; CHECK-RV32-NEXT:  .LBB61_122: # %else454
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_123
+; CHECK-RV32-NEXT:    j .LBB61_652
+; CHECK-RV32-NEXT:  .LBB61_123: # %else458
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_124
+; CHECK-RV32-NEXT:    j .LBB61_653
+; CHECK-RV32-NEXT:  .LBB61_124: # %else462
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_125
+; CHECK-RV32-NEXT:    j .LBB61_654
+; CHECK-RV32-NEXT:  .LBB61_125: # %else466
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_126
+; CHECK-RV32-NEXT:    j .LBB61_655
+; CHECK-RV32-NEXT:  .LBB61_126: # %else470
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_127
+; CHECK-RV32-NEXT:    j .LBB61_656
+; CHECK-RV32-NEXT:  .LBB61_127: # %else474
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_128
+; CHECK-RV32-NEXT:    j .LBB61_657
+; CHECK-RV32-NEXT:  .LBB61_128: # %else478
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_129
+; CHECK-RV32-NEXT:    j .LBB61_658
+; CHECK-RV32-NEXT:  .LBB61_129: # %else482
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_130
+; CHECK-RV32-NEXT:    j .LBB61_659
+; CHECK-RV32-NEXT:  .LBB61_130: # %else486
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_131
+; CHECK-RV32-NEXT:    j .LBB61_660
+; CHECK-RV32-NEXT:  .LBB61_131: # %else490
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_132
+; CHECK-RV32-NEXT:    j .LBB61_661
+; CHECK-RV32-NEXT:  .LBB61_132: # %else494
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_134
+; CHECK-RV32-NEXT:  .LBB61_133: # %cond.load497
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 126
+; CHECK-RV32-NEXT:    li a4, 125
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:  .LBB61_134: # %else498
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_136
+; CHECK-RV32-NEXT:  # %bb.135: # %cond.load501
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 127
+; CHECK-RV32-NEXT:    li a4, 126
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_136: # %else502
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_137
+; CHECK-RV32-NEXT:    j .LBB61_662
+; CHECK-RV32-NEXT:  .LBB61_137: # %else506
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_138
+; CHECK-RV32-NEXT:    j .LBB61_663
+; CHECK-RV32-NEXT:  .LBB61_138: # %else510
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_139
+; CHECK-RV32-NEXT:    j .LBB61_664
+; CHECK-RV32-NEXT:  .LBB61_139: # %else514
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_140
+; CHECK-RV32-NEXT:    j .LBB61_665
+; CHECK-RV32-NEXT:  .LBB61_140: # %else518
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_141
+; CHECK-RV32-NEXT:    j .LBB61_666
+; CHECK-RV32-NEXT:  .LBB61_141: # %else522
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_142
+; CHECK-RV32-NEXT:    j .LBB61_667
+; CHECK-RV32-NEXT:  .LBB61_142: # %else526
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_143
+; CHECK-RV32-NEXT:    j .LBB61_668
+; CHECK-RV32-NEXT:  .LBB61_143: # %else530
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_144
+; CHECK-RV32-NEXT:    j .LBB61_669
+; CHECK-RV32-NEXT:  .LBB61_144: # %else534
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_145
+; CHECK-RV32-NEXT:    j .LBB61_670
+; CHECK-RV32-NEXT:  .LBB61_145: # %else538
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_146
+; CHECK-RV32-NEXT:    j .LBB61_671
+; CHECK-RV32-NEXT:  .LBB61_146: # %else542
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_147
+; CHECK-RV32-NEXT:    j .LBB61_672
+; CHECK-RV32-NEXT:  .LBB61_147: # %else546
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_148
+; CHECK-RV32-NEXT:    j .LBB61_673
+; CHECK-RV32-NEXT:  .LBB61_148: # %else550
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_149
+; CHECK-RV32-NEXT:    j .LBB61_674
+; CHECK-RV32-NEXT:  .LBB61_149: # %else554
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_150
+; CHECK-RV32-NEXT:    j .LBB61_675
+; CHECK-RV32-NEXT:  .LBB61_150: # %else558
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_151
+; CHECK-RV32-NEXT:    j .LBB61_676
+; CHECK-RV32-NEXT:  .LBB61_151: # %else562
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_152
+; CHECK-RV32-NEXT:    j .LBB61_677
+; CHECK-RV32-NEXT:  .LBB61_152: # %else566
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_153
+; CHECK-RV32-NEXT:    j .LBB61_678
+; CHECK-RV32-NEXT:  .LBB61_153: # %else570
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_154
+; CHECK-RV32-NEXT:    j .LBB61_679
+; CHECK-RV32-NEXT:  .LBB61_154: # %else574
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_155
+; CHECK-RV32-NEXT:    j .LBB61_680
+; CHECK-RV32-NEXT:  .LBB61_155: # %else578
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_156
+; CHECK-RV32-NEXT:    j .LBB61_681
+; CHECK-RV32-NEXT:  .LBB61_156: # %else582
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_157
+; CHECK-RV32-NEXT:    j .LBB61_682
+; CHECK-RV32-NEXT:  .LBB61_157: # %else586
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_158
+; CHECK-RV32-NEXT:    j .LBB61_683
+; CHECK-RV32-NEXT:  .LBB61_158: # %else590
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_159
+; CHECK-RV32-NEXT:    j .LBB61_684
+; CHECK-RV32-NEXT:  .LBB61_159: # %else594
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_160
+; CHECK-RV32-NEXT:    j .LBB61_685
+; CHECK-RV32-NEXT:  .LBB61_160: # %else598
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_161
+; CHECK-RV32-NEXT:    j .LBB61_686
+; CHECK-RV32-NEXT:  .LBB61_161: # %else602
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_162
+; CHECK-RV32-NEXT:    j .LBB61_687
+; CHECK-RV32-NEXT:  .LBB61_162: # %else606
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_163
+; CHECK-RV32-NEXT:    j .LBB61_688
+; CHECK-RV32-NEXT:  .LBB61_163: # %else610
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_164
+; CHECK-RV32-NEXT:    j .LBB61_689
+; CHECK-RV32-NEXT:  .LBB61_164: # %else614
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_165
+; CHECK-RV32-NEXT:    j .LBB61_690
+; CHECK-RV32-NEXT:  .LBB61_165: # %else618
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_166
+; CHECK-RV32-NEXT:    j .LBB61_691
+; CHECK-RV32-NEXT:  .LBB61_166: # %else622
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_168
+; CHECK-RV32-NEXT:  .LBB61_167: # %cond.load625
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 158
+; CHECK-RV32-NEXT:    li a4, 157
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_168: # %else626
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_170
+; CHECK-RV32-NEXT:  # %bb.169: # %cond.load629
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 159
+; CHECK-RV32-NEXT:    li a4, 158
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_170: # %else630
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_171
+; CHECK-RV32-NEXT:    j .LBB61_692
+; CHECK-RV32-NEXT:  .LBB61_171: # %else634
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_172
+; CHECK-RV32-NEXT:    j .LBB61_693
+; CHECK-RV32-NEXT:  .LBB61_172: # %else638
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_173
+; CHECK-RV32-NEXT:    j .LBB61_694
+; CHECK-RV32-NEXT:  .LBB61_173: # %else642
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_174
+; CHECK-RV32-NEXT:    j .LBB61_695
+; CHECK-RV32-NEXT:  .LBB61_174: # %else646
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_175
+; CHECK-RV32-NEXT:    j .LBB61_696
+; CHECK-RV32-NEXT:  .LBB61_175: # %else650
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_176
+; CHECK-RV32-NEXT:    j .LBB61_697
+; CHECK-RV32-NEXT:  .LBB61_176: # %else654
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_177
+; CHECK-RV32-NEXT:    j .LBB61_698
+; CHECK-RV32-NEXT:  .LBB61_177: # %else658
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_178
+; CHECK-RV32-NEXT:    j .LBB61_699
+; CHECK-RV32-NEXT:  .LBB61_178: # %else662
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_179
+; CHECK-RV32-NEXT:    j .LBB61_700
+; CHECK-RV32-NEXT:  .LBB61_179: # %else666
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_180
+; CHECK-RV32-NEXT:    j .LBB61_701
+; CHECK-RV32-NEXT:  .LBB61_180: # %else670
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_181
+; CHECK-RV32-NEXT:    j .LBB61_702
+; CHECK-RV32-NEXT:  .LBB61_181: # %else674
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_182
+; CHECK-RV32-NEXT:    j .LBB61_703
+; CHECK-RV32-NEXT:  .LBB61_182: # %else678
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_183
+; CHECK-RV32-NEXT:    j .LBB61_704
+; CHECK-RV32-NEXT:  .LBB61_183: # %else682
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_184
+; CHECK-RV32-NEXT:    j .LBB61_705
+; CHECK-RV32-NEXT:  .LBB61_184: # %else686
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_185
+; CHECK-RV32-NEXT:    j .LBB61_706
+; CHECK-RV32-NEXT:  .LBB61_185: # %else690
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_186
+; CHECK-RV32-NEXT:    j .LBB61_707
+; CHECK-RV32-NEXT:  .LBB61_186: # %else694
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_187
+; CHECK-RV32-NEXT:    j .LBB61_708
+; CHECK-RV32-NEXT:  .LBB61_187: # %else698
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_188
+; CHECK-RV32-NEXT:    j .LBB61_709
+; CHECK-RV32-NEXT:  .LBB61_188: # %else702
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_189
+; CHECK-RV32-NEXT:    j .LBB61_710
+; CHECK-RV32-NEXT:  .LBB61_189: # %else706
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_190
+; CHECK-RV32-NEXT:    j .LBB61_711
+; CHECK-RV32-NEXT:  .LBB61_190: # %else710
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_191
+; CHECK-RV32-NEXT:    j .LBB61_712
+; CHECK-RV32-NEXT:  .LBB61_191: # %else714
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_192
+; CHECK-RV32-NEXT:    j .LBB61_713
+; CHECK-RV32-NEXT:  .LBB61_192: # %else718
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_193
+; CHECK-RV32-NEXT:    j .LBB61_714
+; CHECK-RV32-NEXT:  .LBB61_193: # %else722
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_194
+; CHECK-RV32-NEXT:    j .LBB61_715
+; CHECK-RV32-NEXT:  .LBB61_194: # %else726
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_195
+; CHECK-RV32-NEXT:    j .LBB61_716
+; CHECK-RV32-NEXT:  .LBB61_195: # %else730
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_196
+; CHECK-RV32-NEXT:    j .LBB61_717
+; CHECK-RV32-NEXT:  .LBB61_196: # %else734
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_197
+; CHECK-RV32-NEXT:    j .LBB61_718
+; CHECK-RV32-NEXT:  .LBB61_197: # %else738
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_198
+; CHECK-RV32-NEXT:    j .LBB61_719
+; CHECK-RV32-NEXT:  .LBB61_198: # %else742
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_199
+; CHECK-RV32-NEXT:    j .LBB61_720
+; CHECK-RV32-NEXT:  .LBB61_199: # %else746
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_200
+; CHECK-RV32-NEXT:    j .LBB61_721
+; CHECK-RV32-NEXT:  .LBB61_200: # %else750
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_202
+; CHECK-RV32-NEXT:  .LBB61_201: # %cond.load753
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 190
+; CHECK-RV32-NEXT:    li a4, 189
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_202: # %else754
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_204
+; CHECK-RV32-NEXT:  # %bb.203: # %cond.load757
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 191
+; CHECK-RV32-NEXT:    li a4, 190
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_204: # %else758
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_205
+; CHECK-RV32-NEXT:    j .LBB61_722
+; CHECK-RV32-NEXT:  .LBB61_205: # %else762
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_206
+; CHECK-RV32-NEXT:    j .LBB61_723
+; CHECK-RV32-NEXT:  .LBB61_206: # %else766
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_207
+; CHECK-RV32-NEXT:    j .LBB61_724
+; CHECK-RV32-NEXT:  .LBB61_207: # %else770
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_208
+; CHECK-RV32-NEXT:    j .LBB61_725
+; CHECK-RV32-NEXT:  .LBB61_208: # %else774
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_209
+; CHECK-RV32-NEXT:    j .LBB61_726
+; CHECK-RV32-NEXT:  .LBB61_209: # %else778
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_210
+; CHECK-RV32-NEXT:    j .LBB61_727
+; CHECK-RV32-NEXT:  .LBB61_210: # %else782
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_211
+; CHECK-RV32-NEXT:    j .LBB61_728
+; CHECK-RV32-NEXT:  .LBB61_211: # %else786
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_212
+; CHECK-RV32-NEXT:    j .LBB61_729
+; CHECK-RV32-NEXT:  .LBB61_212: # %else790
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_213
+; CHECK-RV32-NEXT:    j .LBB61_730
+; CHECK-RV32-NEXT:  .LBB61_213: # %else794
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_214
+; CHECK-RV32-NEXT:    j .LBB61_731
+; CHECK-RV32-NEXT:  .LBB61_214: # %else798
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_215
+; CHECK-RV32-NEXT:    j .LBB61_732
+; CHECK-RV32-NEXT:  .LBB61_215: # %else802
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_216
+; CHECK-RV32-NEXT:    j .LBB61_733
+; CHECK-RV32-NEXT:  .LBB61_216: # %else806
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_217
+; CHECK-RV32-NEXT:    j .LBB61_734
+; CHECK-RV32-NEXT:  .LBB61_217: # %else810
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_218
+; CHECK-RV32-NEXT:    j .LBB61_735
+; CHECK-RV32-NEXT:  .LBB61_218: # %else814
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_219
+; CHECK-RV32-NEXT:    j .LBB61_736
+; CHECK-RV32-NEXT:  .LBB61_219: # %else818
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_220
+; CHECK-RV32-NEXT:    j .LBB61_737
+; CHECK-RV32-NEXT:  .LBB61_220: # %else822
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_221
+; CHECK-RV32-NEXT:    j .LBB61_738
+; CHECK-RV32-NEXT:  .LBB61_221: # %else826
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_222
+; CHECK-RV32-NEXT:    j .LBB61_739
+; CHECK-RV32-NEXT:  .LBB61_222: # %else830
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_223
+; CHECK-RV32-NEXT:    j .LBB61_740
+; CHECK-RV32-NEXT:  .LBB61_223: # %else834
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_224
+; CHECK-RV32-NEXT:    j .LBB61_741
+; CHECK-RV32-NEXT:  .LBB61_224: # %else838
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_225
+; CHECK-RV32-NEXT:    j .LBB61_742
+; CHECK-RV32-NEXT:  .LBB61_225: # %else842
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_226
+; CHECK-RV32-NEXT:    j .LBB61_743
+; CHECK-RV32-NEXT:  .LBB61_226: # %else846
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_227
+; CHECK-RV32-NEXT:    j .LBB61_744
+; CHECK-RV32-NEXT:  .LBB61_227: # %else850
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_228
+; CHECK-RV32-NEXT:    j .LBB61_745
+; CHECK-RV32-NEXT:  .LBB61_228: # %else854
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_229
+; CHECK-RV32-NEXT:    j .LBB61_746
+; CHECK-RV32-NEXT:  .LBB61_229: # %else858
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_230
+; CHECK-RV32-NEXT:    j .LBB61_747
+; CHECK-RV32-NEXT:  .LBB61_230: # %else862
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_231
+; CHECK-RV32-NEXT:    j .LBB61_748
+; CHECK-RV32-NEXT:  .LBB61_231: # %else866
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_232
+; CHECK-RV32-NEXT:    j .LBB61_749
+; CHECK-RV32-NEXT:  .LBB61_232: # %else870
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_233
+; CHECK-RV32-NEXT:    j .LBB61_750
+; CHECK-RV32-NEXT:  .LBB61_233: # %else874
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_234
+; CHECK-RV32-NEXT:    j .LBB61_751
+; CHECK-RV32-NEXT:  .LBB61_234: # %else878
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_236
+; CHECK-RV32-NEXT:  .LBB61_235: # %cond.load881
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 222
+; CHECK-RV32-NEXT:    li a4, 221
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_236: # %else882
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_238
+; CHECK-RV32-NEXT:  # %bb.237: # %cond.load885
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 223
+; CHECK-RV32-NEXT:    li a4, 222
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_238: # %else886
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_239
+; CHECK-RV32-NEXT:    j .LBB61_752
+; CHECK-RV32-NEXT:  .LBB61_239: # %else890
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_240
+; CHECK-RV32-NEXT:    j .LBB61_753
+; CHECK-RV32-NEXT:  .LBB61_240: # %else894
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_241
+; CHECK-RV32-NEXT:    j .LBB61_754
+; CHECK-RV32-NEXT:  .LBB61_241: # %else898
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_242
+; CHECK-RV32-NEXT:    j .LBB61_755
+; CHECK-RV32-NEXT:  .LBB61_242: # %else902
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_243
+; CHECK-RV32-NEXT:    j .LBB61_756
+; CHECK-RV32-NEXT:  .LBB61_243: # %else906
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_244
+; CHECK-RV32-NEXT:    j .LBB61_757
+; CHECK-RV32-NEXT:  .LBB61_244: # %else910
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_245
+; CHECK-RV32-NEXT:    j .LBB61_758
+; CHECK-RV32-NEXT:  .LBB61_245: # %else914
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_246
+; CHECK-RV32-NEXT:    j .LBB61_759
+; CHECK-RV32-NEXT:  .LBB61_246: # %else918
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_247
+; CHECK-RV32-NEXT:    j .LBB61_760
+; CHECK-RV32-NEXT:  .LBB61_247: # %else922
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_248
+; CHECK-RV32-NEXT:    j .LBB61_761
+; CHECK-RV32-NEXT:  .LBB61_248: # %else926
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_249
+; CHECK-RV32-NEXT:    j .LBB61_762
+; CHECK-RV32-NEXT:  .LBB61_249: # %else930
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_250
+; CHECK-RV32-NEXT:    j .LBB61_763
+; CHECK-RV32-NEXT:  .LBB61_250: # %else934
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_251
+; CHECK-RV32-NEXT:    j .LBB61_764
+; CHECK-RV32-NEXT:  .LBB61_251: # %else938
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_252
+; CHECK-RV32-NEXT:    j .LBB61_765
+; CHECK-RV32-NEXT:  .LBB61_252: # %else942
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_253
+; CHECK-RV32-NEXT:    j .LBB61_766
+; CHECK-RV32-NEXT:  .LBB61_253: # %else946
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_254
+; CHECK-RV32-NEXT:    j .LBB61_767
+; CHECK-RV32-NEXT:  .LBB61_254: # %else950
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_255
+; CHECK-RV32-NEXT:    j .LBB61_768
+; CHECK-RV32-NEXT:  .LBB61_255: # %else954
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_256
+; CHECK-RV32-NEXT:    j .LBB61_769
+; CHECK-RV32-NEXT:  .LBB61_256: # %else958
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_257
+; CHECK-RV32-NEXT:    j .LBB61_770
+; CHECK-RV32-NEXT:  .LBB61_257: # %else962
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_258
+; CHECK-RV32-NEXT:    j .LBB61_771
+; CHECK-RV32-NEXT:  .LBB61_258: # %else966
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_259
+; CHECK-RV32-NEXT:    j .LBB61_772
+; CHECK-RV32-NEXT:  .LBB61_259: # %else970
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_260
+; CHECK-RV32-NEXT:    j .LBB61_773
+; CHECK-RV32-NEXT:  .LBB61_260: # %else974
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_261
+; CHECK-RV32-NEXT:    j .LBB61_774
+; CHECK-RV32-NEXT:  .LBB61_261: # %else978
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_262
+; CHECK-RV32-NEXT:    j .LBB61_775
+; CHECK-RV32-NEXT:  .LBB61_262: # %else982
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_263
+; CHECK-RV32-NEXT:    j .LBB61_776
+; CHECK-RV32-NEXT:  .LBB61_263: # %else986
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_264
+; CHECK-RV32-NEXT:    j .LBB61_777
+; CHECK-RV32-NEXT:  .LBB61_264: # %else990
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_265
+; CHECK-RV32-NEXT:    j .LBB61_778
+; CHECK-RV32-NEXT:  .LBB61_265: # %else994
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_266
+; CHECK-RV32-NEXT:    j .LBB61_779
+; CHECK-RV32-NEXT:  .LBB61_266: # %else998
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_267
+; CHECK-RV32-NEXT:    j .LBB61_780
+; CHECK-RV32-NEXT:  .LBB61_267: # %else1002
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_268
+; CHECK-RV32-NEXT:    j .LBB61_781
+; CHECK-RV32-NEXT:  .LBB61_268: # %else1006
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_270
+; CHECK-RV32-NEXT:  .LBB61_269: # %cond.load1009
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 254
+; CHECK-RV32-NEXT:    li a4, 253
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_270: # %else1010
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_272
+; CHECK-RV32-NEXT:  # %bb.271: # %cond.load1013
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 255
+; CHECK-RV32-NEXT:    li a4, 254
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:  .LBB61_272: # %else1014
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_273
+; CHECK-RV32-NEXT:    j .LBB61_782
+; CHECK-RV32-NEXT:  .LBB61_273: # %else1018
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_274
+; CHECK-RV32-NEXT:    j .LBB61_783
+; CHECK-RV32-NEXT:  .LBB61_274: # %else1022
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_275
+; CHECK-RV32-NEXT:    j .LBB61_784
+; CHECK-RV32-NEXT:  .LBB61_275: # %else1026
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_276
+; CHECK-RV32-NEXT:    j .LBB61_785
+; CHECK-RV32-NEXT:  .LBB61_276: # %else1030
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_277
+; CHECK-RV32-NEXT:    j .LBB61_786
+; CHECK-RV32-NEXT:  .LBB61_277: # %else1034
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_278
+; CHECK-RV32-NEXT:    j .LBB61_787
+; CHECK-RV32-NEXT:  .LBB61_278: # %else1038
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_279
+; CHECK-RV32-NEXT:    j .LBB61_788
+; CHECK-RV32-NEXT:  .LBB61_279: # %else1042
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_280
+; CHECK-RV32-NEXT:    j .LBB61_789
+; CHECK-RV32-NEXT:  .LBB61_280: # %else1046
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_281
+; CHECK-RV32-NEXT:    j .LBB61_790
+; CHECK-RV32-NEXT:  .LBB61_281: # %else1050
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_282
+; CHECK-RV32-NEXT:    j .LBB61_791
+; CHECK-RV32-NEXT:  .LBB61_282: # %else1054
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_283
+; CHECK-RV32-NEXT:    j .LBB61_792
+; CHECK-RV32-NEXT:  .LBB61_283: # %else1058
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_284
+; CHECK-RV32-NEXT:    j .LBB61_793
+; CHECK-RV32-NEXT:  .LBB61_284: # %else1062
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_285
+; CHECK-RV32-NEXT:    j .LBB61_794
+; CHECK-RV32-NEXT:  .LBB61_285: # %else1066
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_286
+; CHECK-RV32-NEXT:    j .LBB61_795
+; CHECK-RV32-NEXT:  .LBB61_286: # %else1070
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_287
+; CHECK-RV32-NEXT:    j .LBB61_796
+; CHECK-RV32-NEXT:  .LBB61_287: # %else1074
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_288
+; CHECK-RV32-NEXT:    j .LBB61_797
+; CHECK-RV32-NEXT:  .LBB61_288: # %else1078
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_289
+; CHECK-RV32-NEXT:    j .LBB61_798
+; CHECK-RV32-NEXT:  .LBB61_289: # %else1082
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_290
+; CHECK-RV32-NEXT:    j .LBB61_799
+; CHECK-RV32-NEXT:  .LBB61_290: # %else1086
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_291
+; CHECK-RV32-NEXT:    j .LBB61_800
+; CHECK-RV32-NEXT:  .LBB61_291: # %else1090
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_292
+; CHECK-RV32-NEXT:    j .LBB61_801
+; CHECK-RV32-NEXT:  .LBB61_292: # %else1094
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_293
+; CHECK-RV32-NEXT:    j .LBB61_802
+; CHECK-RV32-NEXT:  .LBB61_293: # %else1098
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_294
+; CHECK-RV32-NEXT:    j .LBB61_803
+; CHECK-RV32-NEXT:  .LBB61_294: # %else1102
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_295
+; CHECK-RV32-NEXT:    j .LBB61_804
+; CHECK-RV32-NEXT:  .LBB61_295: # %else1106
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_296
+; CHECK-RV32-NEXT:    j .LBB61_805
+; CHECK-RV32-NEXT:  .LBB61_296: # %else1110
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_297
+; CHECK-RV32-NEXT:    j .LBB61_806
+; CHECK-RV32-NEXT:  .LBB61_297: # %else1114
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_298
+; CHECK-RV32-NEXT:    j .LBB61_807
+; CHECK-RV32-NEXT:  .LBB61_298: # %else1118
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_299
+; CHECK-RV32-NEXT:    j .LBB61_808
+; CHECK-RV32-NEXT:  .LBB61_299: # %else1122
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_300
+; CHECK-RV32-NEXT:    j .LBB61_809
+; CHECK-RV32-NEXT:  .LBB61_300: # %else1126
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_301
+; CHECK-RV32-NEXT:    j .LBB61_810
+; CHECK-RV32-NEXT:  .LBB61_301: # %else1130
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_302
+; CHECK-RV32-NEXT:    j .LBB61_811
+; CHECK-RV32-NEXT:  .LBB61_302: # %else1134
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_304
+; CHECK-RV32-NEXT:  .LBB61_303: # %cond.load1137
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 286
+; CHECK-RV32-NEXT:    li a4, 285
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_304: # %else1138
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_306
+; CHECK-RV32-NEXT:  # %bb.305: # %cond.load1141
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 287
+; CHECK-RV32-NEXT:    li a4, 286
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_306: # %else1142
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_307
+; CHECK-RV32-NEXT:    j .LBB61_812
+; CHECK-RV32-NEXT:  .LBB61_307: # %else1146
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_308
+; CHECK-RV32-NEXT:    j .LBB61_813
+; CHECK-RV32-NEXT:  .LBB61_308: # %else1150
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_309
+; CHECK-RV32-NEXT:    j .LBB61_814
+; CHECK-RV32-NEXT:  .LBB61_309: # %else1154
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_310
+; CHECK-RV32-NEXT:    j .LBB61_815
+; CHECK-RV32-NEXT:  .LBB61_310: # %else1158
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_311
+; CHECK-RV32-NEXT:    j .LBB61_816
+; CHECK-RV32-NEXT:  .LBB61_311: # %else1162
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_312
+; CHECK-RV32-NEXT:    j .LBB61_817
+; CHECK-RV32-NEXT:  .LBB61_312: # %else1166
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_313
+; CHECK-RV32-NEXT:    j .LBB61_818
+; CHECK-RV32-NEXT:  .LBB61_313: # %else1170
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_314
+; CHECK-RV32-NEXT:    j .LBB61_819
+; CHECK-RV32-NEXT:  .LBB61_314: # %else1174
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_315
+; CHECK-RV32-NEXT:    j .LBB61_820
+; CHECK-RV32-NEXT:  .LBB61_315: # %else1178
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_316
+; CHECK-RV32-NEXT:    j .LBB61_821
+; CHECK-RV32-NEXT:  .LBB61_316: # %else1182
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_317
+; CHECK-RV32-NEXT:    j .LBB61_822
+; CHECK-RV32-NEXT:  .LBB61_317: # %else1186
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_318
+; CHECK-RV32-NEXT:    j .LBB61_823
+; CHECK-RV32-NEXT:  .LBB61_318: # %else1190
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_319
+; CHECK-RV32-NEXT:    j .LBB61_824
+; CHECK-RV32-NEXT:  .LBB61_319: # %else1194
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_320
+; CHECK-RV32-NEXT:    j .LBB61_825
+; CHECK-RV32-NEXT:  .LBB61_320: # %else1198
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_321
+; CHECK-RV32-NEXT:    j .LBB61_826
+; CHECK-RV32-NEXT:  .LBB61_321: # %else1202
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_322
+; CHECK-RV32-NEXT:    j .LBB61_827
+; CHECK-RV32-NEXT:  .LBB61_322: # %else1206
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_323
+; CHECK-RV32-NEXT:    j .LBB61_828
+; CHECK-RV32-NEXT:  .LBB61_323: # %else1210
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_324
+; CHECK-RV32-NEXT:    j .LBB61_829
+; CHECK-RV32-NEXT:  .LBB61_324: # %else1214
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_325
+; CHECK-RV32-NEXT:    j .LBB61_830
+; CHECK-RV32-NEXT:  .LBB61_325: # %else1218
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_326
+; CHECK-RV32-NEXT:    j .LBB61_831
+; CHECK-RV32-NEXT:  .LBB61_326: # %else1222
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_327
+; CHECK-RV32-NEXT:    j .LBB61_832
+; CHECK-RV32-NEXT:  .LBB61_327: # %else1226
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_328
+; CHECK-RV32-NEXT:    j .LBB61_833
+; CHECK-RV32-NEXT:  .LBB61_328: # %else1230
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_329
+; CHECK-RV32-NEXT:    j .LBB61_834
+; CHECK-RV32-NEXT:  .LBB61_329: # %else1234
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_330
+; CHECK-RV32-NEXT:    j .LBB61_835
+; CHECK-RV32-NEXT:  .LBB61_330: # %else1238
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_331
+; CHECK-RV32-NEXT:    j .LBB61_836
+; CHECK-RV32-NEXT:  .LBB61_331: # %else1242
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_332
+; CHECK-RV32-NEXT:    j .LBB61_837
+; CHECK-RV32-NEXT:  .LBB61_332: # %else1246
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_333
+; CHECK-RV32-NEXT:    j .LBB61_838
+; CHECK-RV32-NEXT:  .LBB61_333: # %else1250
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_334
+; CHECK-RV32-NEXT:    j .LBB61_839
+; CHECK-RV32-NEXT:  .LBB61_334: # %else1254
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_335
+; CHECK-RV32-NEXT:    j .LBB61_840
+; CHECK-RV32-NEXT:  .LBB61_335: # %else1258
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_336
+; CHECK-RV32-NEXT:    j .LBB61_841
+; CHECK-RV32-NEXT:  .LBB61_336: # %else1262
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_338
+; CHECK-RV32-NEXT:  .LBB61_337: # %cond.load1265
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 318
+; CHECK-RV32-NEXT:    li a4, 317
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_338: # %else1266
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_340
+; CHECK-RV32-NEXT:  # %bb.339: # %cond.load1269
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 319
+; CHECK-RV32-NEXT:    li a4, 318
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_340: # %else1270
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_341
+; CHECK-RV32-NEXT:    j .LBB61_842
+; CHECK-RV32-NEXT:  .LBB61_341: # %else1274
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_342
+; CHECK-RV32-NEXT:    j .LBB61_843
+; CHECK-RV32-NEXT:  .LBB61_342: # %else1278
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_343
+; CHECK-RV32-NEXT:    j .LBB61_844
+; CHECK-RV32-NEXT:  .LBB61_343: # %else1282
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_344
+; CHECK-RV32-NEXT:    j .LBB61_845
+; CHECK-RV32-NEXT:  .LBB61_344: # %else1286
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_345
+; CHECK-RV32-NEXT:    j .LBB61_846
+; CHECK-RV32-NEXT:  .LBB61_345: # %else1290
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_346
+; CHECK-RV32-NEXT:    j .LBB61_847
+; CHECK-RV32-NEXT:  .LBB61_346: # %else1294
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_347
+; CHECK-RV32-NEXT:    j .LBB61_848
+; CHECK-RV32-NEXT:  .LBB61_347: # %else1298
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_348
+; CHECK-RV32-NEXT:    j .LBB61_849
+; CHECK-RV32-NEXT:  .LBB61_348: # %else1302
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_349
+; CHECK-RV32-NEXT:    j .LBB61_850
+; CHECK-RV32-NEXT:  .LBB61_349: # %else1306
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_350
+; CHECK-RV32-NEXT:    j .LBB61_851
+; CHECK-RV32-NEXT:  .LBB61_350: # %else1310
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_351
+; CHECK-RV32-NEXT:    j .LBB61_852
+; CHECK-RV32-NEXT:  .LBB61_351: # %else1314
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_352
+; CHECK-RV32-NEXT:    j .LBB61_853
+; CHECK-RV32-NEXT:  .LBB61_352: # %else1318
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_353
+; CHECK-RV32-NEXT:    j .LBB61_854
+; CHECK-RV32-NEXT:  .LBB61_353: # %else1322
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_354
+; CHECK-RV32-NEXT:    j .LBB61_855
+; CHECK-RV32-NEXT:  .LBB61_354: # %else1326
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_355
+; CHECK-RV32-NEXT:    j .LBB61_856
+; CHECK-RV32-NEXT:  .LBB61_355: # %else1330
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_356
+; CHECK-RV32-NEXT:    j .LBB61_857
+; CHECK-RV32-NEXT:  .LBB61_356: # %else1334
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_357
+; CHECK-RV32-NEXT:    j .LBB61_858
+; CHECK-RV32-NEXT:  .LBB61_357: # %else1338
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_358
+; CHECK-RV32-NEXT:    j .LBB61_859
+; CHECK-RV32-NEXT:  .LBB61_358: # %else1342
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_359
+; CHECK-RV32-NEXT:    j .LBB61_860
+; CHECK-RV32-NEXT:  .LBB61_359: # %else1346
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_360
+; CHECK-RV32-NEXT:    j .LBB61_861
+; CHECK-RV32-NEXT:  .LBB61_360: # %else1350
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_361
+; CHECK-RV32-NEXT:    j .LBB61_862
+; CHECK-RV32-NEXT:  .LBB61_361: # %else1354
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_362
+; CHECK-RV32-NEXT:    j .LBB61_863
+; CHECK-RV32-NEXT:  .LBB61_362: # %else1358
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_363
+; CHECK-RV32-NEXT:    j .LBB61_864
+; CHECK-RV32-NEXT:  .LBB61_363: # %else1362
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_364
+; CHECK-RV32-NEXT:    j .LBB61_865
+; CHECK-RV32-NEXT:  .LBB61_364: # %else1366
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_365
+; CHECK-RV32-NEXT:    j .LBB61_866
+; CHECK-RV32-NEXT:  .LBB61_365: # %else1370
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_366
+; CHECK-RV32-NEXT:    j .LBB61_867
+; CHECK-RV32-NEXT:  .LBB61_366: # %else1374
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_367
+; CHECK-RV32-NEXT:    j .LBB61_868
+; CHECK-RV32-NEXT:  .LBB61_367: # %else1378
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_368
+; CHECK-RV32-NEXT:    j .LBB61_869
+; CHECK-RV32-NEXT:  .LBB61_368: # %else1382
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_369
+; CHECK-RV32-NEXT:    j .LBB61_870
+; CHECK-RV32-NEXT:  .LBB61_369: # %else1386
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_370
+; CHECK-RV32-NEXT:    j .LBB61_871
+; CHECK-RV32-NEXT:  .LBB61_370: # %else1390
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_372
+; CHECK-RV32-NEXT:  .LBB61_371: # %cond.load1393
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 350
+; CHECK-RV32-NEXT:    li a4, 349
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_372: # %else1394
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_374
+; CHECK-RV32-NEXT:  # %bb.373: # %cond.load1397
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 351
+; CHECK-RV32-NEXT:    li a4, 350
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_374: # %else1398
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_375
+; CHECK-RV32-NEXT:    j .LBB61_872
+; CHECK-RV32-NEXT:  .LBB61_375: # %else1402
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_376
+; CHECK-RV32-NEXT:    j .LBB61_873
+; CHECK-RV32-NEXT:  .LBB61_376: # %else1406
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_377
+; CHECK-RV32-NEXT:    j .LBB61_874
+; CHECK-RV32-NEXT:  .LBB61_377: # %else1410
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_378
+; CHECK-RV32-NEXT:    j .LBB61_875
+; CHECK-RV32-NEXT:  .LBB61_378: # %else1414
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_379
+; CHECK-RV32-NEXT:    j .LBB61_876
+; CHECK-RV32-NEXT:  .LBB61_379: # %else1418
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_380
+; CHECK-RV32-NEXT:    j .LBB61_877
+; CHECK-RV32-NEXT:  .LBB61_380: # %else1422
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_381
+; CHECK-RV32-NEXT:    j .LBB61_878
+; CHECK-RV32-NEXT:  .LBB61_381: # %else1426
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_382
+; CHECK-RV32-NEXT:    j .LBB61_879
+; CHECK-RV32-NEXT:  .LBB61_382: # %else1430
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_383
+; CHECK-RV32-NEXT:    j .LBB61_880
+; CHECK-RV32-NEXT:  .LBB61_383: # %else1434
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_384
+; CHECK-RV32-NEXT:    j .LBB61_881
+; CHECK-RV32-NEXT:  .LBB61_384: # %else1438
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_385
+; CHECK-RV32-NEXT:    j .LBB61_882
+; CHECK-RV32-NEXT:  .LBB61_385: # %else1442
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_386
+; CHECK-RV32-NEXT:    j .LBB61_883
+; CHECK-RV32-NEXT:  .LBB61_386: # %else1446
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_387
+; CHECK-RV32-NEXT:    j .LBB61_884
+; CHECK-RV32-NEXT:  .LBB61_387: # %else1450
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_388
+; CHECK-RV32-NEXT:    j .LBB61_885
+; CHECK-RV32-NEXT:  .LBB61_388: # %else1454
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_389
+; CHECK-RV32-NEXT:    j .LBB61_886
+; CHECK-RV32-NEXT:  .LBB61_389: # %else1458
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_390
+; CHECK-RV32-NEXT:    j .LBB61_887
+; CHECK-RV32-NEXT:  .LBB61_390: # %else1462
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_391
+; CHECK-RV32-NEXT:    j .LBB61_888
+; CHECK-RV32-NEXT:  .LBB61_391: # %else1466
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_392
+; CHECK-RV32-NEXT:    j .LBB61_889
+; CHECK-RV32-NEXT:  .LBB61_392: # %else1470
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_393
+; CHECK-RV32-NEXT:    j .LBB61_890
+; CHECK-RV32-NEXT:  .LBB61_393: # %else1474
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_394
+; CHECK-RV32-NEXT:    j .LBB61_891
+; CHECK-RV32-NEXT:  .LBB61_394: # %else1478
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_395
+; CHECK-RV32-NEXT:    j .LBB61_892
+; CHECK-RV32-NEXT:  .LBB61_395: # %else1482
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_396
+; CHECK-RV32-NEXT:    j .LBB61_893
+; CHECK-RV32-NEXT:  .LBB61_396: # %else1486
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_397
+; CHECK-RV32-NEXT:    j .LBB61_894
+; CHECK-RV32-NEXT:  .LBB61_397: # %else1490
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_398
+; CHECK-RV32-NEXT:    j .LBB61_895
+; CHECK-RV32-NEXT:  .LBB61_398: # %else1494
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_399
+; CHECK-RV32-NEXT:    j .LBB61_896
+; CHECK-RV32-NEXT:  .LBB61_399: # %else1498
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_400
+; CHECK-RV32-NEXT:    j .LBB61_897
+; CHECK-RV32-NEXT:  .LBB61_400: # %else1502
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_401
+; CHECK-RV32-NEXT:    j .LBB61_898
+; CHECK-RV32-NEXT:  .LBB61_401: # %else1506
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_402
+; CHECK-RV32-NEXT:    j .LBB61_899
+; CHECK-RV32-NEXT:  .LBB61_402: # %else1510
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_403
+; CHECK-RV32-NEXT:    j .LBB61_900
+; CHECK-RV32-NEXT:  .LBB61_403: # %else1514
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_404
+; CHECK-RV32-NEXT:    j .LBB61_901
+; CHECK-RV32-NEXT:  .LBB61_404: # %else1518
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_406
+; CHECK-RV32-NEXT:  .LBB61_405: # %cond.load1521
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 382
+; CHECK-RV32-NEXT:    li a4, 381
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_406: # %else1522
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_408
+; CHECK-RV32-NEXT:  # %bb.407: # %cond.load1525
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 383
+; CHECK-RV32-NEXT:    li a4, 382
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_408: # %else1526
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_409
+; CHECK-RV32-NEXT:    j .LBB61_902
+; CHECK-RV32-NEXT:  .LBB61_409: # %else1530
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_410
+; CHECK-RV32-NEXT:    j .LBB61_903
+; CHECK-RV32-NEXT:  .LBB61_410: # %else1534
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_411
+; CHECK-RV32-NEXT:    j .LBB61_904
+; CHECK-RV32-NEXT:  .LBB61_411: # %else1538
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_412
+; CHECK-RV32-NEXT:    j .LBB61_905
+; CHECK-RV32-NEXT:  .LBB61_412: # %else1542
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_413
+; CHECK-RV32-NEXT:    j .LBB61_906
+; CHECK-RV32-NEXT:  .LBB61_413: # %else1546
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_414
+; CHECK-RV32-NEXT:    j .LBB61_907
+; CHECK-RV32-NEXT:  .LBB61_414: # %else1550
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_415
+; CHECK-RV32-NEXT:    j .LBB61_908
+; CHECK-RV32-NEXT:  .LBB61_415: # %else1554
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_416
+; CHECK-RV32-NEXT:    j .LBB61_909
+; CHECK-RV32-NEXT:  .LBB61_416: # %else1558
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_417
+; CHECK-RV32-NEXT:    j .LBB61_910
+; CHECK-RV32-NEXT:  .LBB61_417: # %else1562
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_418
+; CHECK-RV32-NEXT:    j .LBB61_911
+; CHECK-RV32-NEXT:  .LBB61_418: # %else1566
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_419
+; CHECK-RV32-NEXT:    j .LBB61_912
+; CHECK-RV32-NEXT:  .LBB61_419: # %else1570
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_420
+; CHECK-RV32-NEXT:    j .LBB61_913
+; CHECK-RV32-NEXT:  .LBB61_420: # %else1574
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_421
+; CHECK-RV32-NEXT:    j .LBB61_914
+; CHECK-RV32-NEXT:  .LBB61_421: # %else1578
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_422
+; CHECK-RV32-NEXT:    j .LBB61_915
+; CHECK-RV32-NEXT:  .LBB61_422: # %else1582
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_423
+; CHECK-RV32-NEXT:    j .LBB61_916
+; CHECK-RV32-NEXT:  .LBB61_423: # %else1586
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_424
+; CHECK-RV32-NEXT:    j .LBB61_917
+; CHECK-RV32-NEXT:  .LBB61_424: # %else1590
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_425
+; CHECK-RV32-NEXT:    j .LBB61_918
+; CHECK-RV32-NEXT:  .LBB61_425: # %else1594
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_426
+; CHECK-RV32-NEXT:    j .LBB61_919
+; CHECK-RV32-NEXT:  .LBB61_426: # %else1598
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_427
+; CHECK-RV32-NEXT:    j .LBB61_920
+; CHECK-RV32-NEXT:  .LBB61_427: # %else1602
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_428
+; CHECK-RV32-NEXT:    j .LBB61_921
+; CHECK-RV32-NEXT:  .LBB61_428: # %else1606
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_429
+; CHECK-RV32-NEXT:    j .LBB61_922
+; CHECK-RV32-NEXT:  .LBB61_429: # %else1610
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_430
+; CHECK-RV32-NEXT:    j .LBB61_923
+; CHECK-RV32-NEXT:  .LBB61_430: # %else1614
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_431
+; CHECK-RV32-NEXT:    j .LBB61_924
+; CHECK-RV32-NEXT:  .LBB61_431: # %else1618
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_432
+; CHECK-RV32-NEXT:    j .LBB61_925
+; CHECK-RV32-NEXT:  .LBB61_432: # %else1622
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_433
+; CHECK-RV32-NEXT:    j .LBB61_926
+; CHECK-RV32-NEXT:  .LBB61_433: # %else1626
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_434
+; CHECK-RV32-NEXT:    j .LBB61_927
+; CHECK-RV32-NEXT:  .LBB61_434: # %else1630
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_435
+; CHECK-RV32-NEXT:    j .LBB61_928
+; CHECK-RV32-NEXT:  .LBB61_435: # %else1634
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_436
+; CHECK-RV32-NEXT:    j .LBB61_929
+; CHECK-RV32-NEXT:  .LBB61_436: # %else1638
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_437
+; CHECK-RV32-NEXT:    j .LBB61_930
+; CHECK-RV32-NEXT:  .LBB61_437: # %else1642
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_438
+; CHECK-RV32-NEXT:    j .LBB61_931
+; CHECK-RV32-NEXT:  .LBB61_438: # %else1646
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_440
+; CHECK-RV32-NEXT:  .LBB61_439: # %cond.load1649
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 414
+; CHECK-RV32-NEXT:    li a4, 413
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_440: # %else1650
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_442
+; CHECK-RV32-NEXT:  # %bb.441: # %cond.load1653
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 415
+; CHECK-RV32-NEXT:    li a4, 414
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_442: # %else1654
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a2, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_443
+; CHECK-RV32-NEXT:    j .LBB61_932
+; CHECK-RV32-NEXT:  .LBB61_443: # %else1658
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_444
+; CHECK-RV32-NEXT:    j .LBB61_933
+; CHECK-RV32-NEXT:  .LBB61_444: # %else1662
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_445
+; CHECK-RV32-NEXT:    j .LBB61_934
+; CHECK-RV32-NEXT:  .LBB61_445: # %else1666
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_446
+; CHECK-RV32-NEXT:    j .LBB61_935
+; CHECK-RV32-NEXT:  .LBB61_446: # %else1670
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_447
+; CHECK-RV32-NEXT:    j .LBB61_936
+; CHECK-RV32-NEXT:  .LBB61_447: # %else1674
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_448
+; CHECK-RV32-NEXT:    j .LBB61_937
+; CHECK-RV32-NEXT:  .LBB61_448: # %else1678
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_449
+; CHECK-RV32-NEXT:    j .LBB61_938
+; CHECK-RV32-NEXT:  .LBB61_449: # %else1682
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_450
+; CHECK-RV32-NEXT:    j .LBB61_939
+; CHECK-RV32-NEXT:  .LBB61_450: # %else1686
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_451
+; CHECK-RV32-NEXT:    j .LBB61_940
+; CHECK-RV32-NEXT:  .LBB61_451: # %else1690
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_452
+; CHECK-RV32-NEXT:    j .LBB61_941
+; CHECK-RV32-NEXT:  .LBB61_452: # %else1694
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_453
+; CHECK-RV32-NEXT:    j .LBB61_942
+; CHECK-RV32-NEXT:  .LBB61_453: # %else1698
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    beqz a3, .LBB61_454
+; CHECK-RV32-NEXT:    j .LBB61_943
+; CHECK-RV32-NEXT:  .LBB61_454: # %else1702
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_455
+; CHECK-RV32-NEXT:    j .LBB61_944
+; CHECK-RV32-NEXT:  .LBB61_455: # %else1706
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_456
+; CHECK-RV32-NEXT:    j .LBB61_945
+; CHECK-RV32-NEXT:  .LBB61_456: # %else1710
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_457
+; CHECK-RV32-NEXT:    j .LBB61_946
+; CHECK-RV32-NEXT:  .LBB61_457: # %else1714
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_458
+; CHECK-RV32-NEXT:    j .LBB61_947
+; CHECK-RV32-NEXT:  .LBB61_458: # %else1718
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_459
+; CHECK-RV32-NEXT:    j .LBB61_948
+; CHECK-RV32-NEXT:  .LBB61_459: # %else1722
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_460
+; CHECK-RV32-NEXT:    j .LBB61_949
+; CHECK-RV32-NEXT:  .LBB61_460: # %else1726
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_461
+; CHECK-RV32-NEXT:    j .LBB61_950
+; CHECK-RV32-NEXT:  .LBB61_461: # %else1730
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_462
+; CHECK-RV32-NEXT:    j .LBB61_951
+; CHECK-RV32-NEXT:  .LBB61_462: # %else1734
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_463
+; CHECK-RV32-NEXT:    j .LBB61_952
+; CHECK-RV32-NEXT:  .LBB61_463: # %else1738
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_464
+; CHECK-RV32-NEXT:    j .LBB61_953
+; CHECK-RV32-NEXT:  .LBB61_464: # %else1742
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_465
+; CHECK-RV32-NEXT:    j .LBB61_954
+; CHECK-RV32-NEXT:  .LBB61_465: # %else1746
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_466
+; CHECK-RV32-NEXT:    j .LBB61_955
+; CHECK-RV32-NEXT:  .LBB61_466: # %else1750
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_467
+; CHECK-RV32-NEXT:    j .LBB61_956
+; CHECK-RV32-NEXT:  .LBB61_467: # %else1754
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_468
+; CHECK-RV32-NEXT:    j .LBB61_957
+; CHECK-RV32-NEXT:  .LBB61_468: # %else1758
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_469
+; CHECK-RV32-NEXT:    j .LBB61_958
+; CHECK-RV32-NEXT:  .LBB61_469: # %else1762
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_470
+; CHECK-RV32-NEXT:    j .LBB61_959
+; CHECK-RV32-NEXT:  .LBB61_470: # %else1766
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_471
+; CHECK-RV32-NEXT:    j .LBB61_960
+; CHECK-RV32-NEXT:  .LBB61_471: # %else1770
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_472
+; CHECK-RV32-NEXT:    j .LBB61_961
+; CHECK-RV32-NEXT:  .LBB61_472: # %else1774
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_474
+; CHECK-RV32-NEXT:  .LBB61_473: # %cond.load1777
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 446
+; CHECK-RV32-NEXT:    li a4, 445
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_474: # %else1778
+; CHECK-RV32-NEXT:    slli a3, a2, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_476
+; CHECK-RV32-NEXT:  # %bb.475: # %cond.load1781
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    li a3, 447
+; CHECK-RV32-NEXT:    li a4, 446
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_476: # %else1782
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a3, v16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_477
+; CHECK-RV32-NEXT:    j .LBB61_962
+; CHECK-RV32-NEXT:  .LBB61_477: # %else1786
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_478
+; CHECK-RV32-NEXT:    j .LBB61_963
+; CHECK-RV32-NEXT:  .LBB61_478: # %else1790
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_479
+; CHECK-RV32-NEXT:    j .LBB61_964
+; CHECK-RV32-NEXT:  .LBB61_479: # %else1794
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_480
+; CHECK-RV32-NEXT:    j .LBB61_965
+; CHECK-RV32-NEXT:  .LBB61_480: # %else1798
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_481
+; CHECK-RV32-NEXT:    j .LBB61_966
+; CHECK-RV32-NEXT:  .LBB61_481: # %else1802
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_482
+; CHECK-RV32-NEXT:    j .LBB61_967
+; CHECK-RV32-NEXT:  .LBB61_482: # %else1806
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_483
+; CHECK-RV32-NEXT:    j .LBB61_968
+; CHECK-RV32-NEXT:  .LBB61_483: # %else1810
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_484
+; CHECK-RV32-NEXT:    j .LBB61_969
+; CHECK-RV32-NEXT:  .LBB61_484: # %else1814
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_485
+; CHECK-RV32-NEXT:    j .LBB61_970
+; CHECK-RV32-NEXT:  .LBB61_485: # %else1818
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_486
+; CHECK-RV32-NEXT:    j .LBB61_971
+; CHECK-RV32-NEXT:  .LBB61_486: # %else1822
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_487
+; CHECK-RV32-NEXT:    j .LBB61_972
+; CHECK-RV32-NEXT:  .LBB61_487: # %else1826
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_488
+; CHECK-RV32-NEXT:    j .LBB61_973
+; CHECK-RV32-NEXT:  .LBB61_488: # %else1830
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_489
+; CHECK-RV32-NEXT:    j .LBB61_974
+; CHECK-RV32-NEXT:  .LBB61_489: # %else1834
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_490
+; CHECK-RV32-NEXT:    j .LBB61_975
+; CHECK-RV32-NEXT:  .LBB61_490: # %else1838
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_491
+; CHECK-RV32-NEXT:    j .LBB61_976
+; CHECK-RV32-NEXT:  .LBB61_491: # %else1842
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_492
+; CHECK-RV32-NEXT:    j .LBB61_977
+; CHECK-RV32-NEXT:  .LBB61_492: # %else1846
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_493
+; CHECK-RV32-NEXT:    j .LBB61_978
+; CHECK-RV32-NEXT:  .LBB61_493: # %else1850
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_494
+; CHECK-RV32-NEXT:    j .LBB61_979
+; CHECK-RV32-NEXT:  .LBB61_494: # %else1854
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_495
+; CHECK-RV32-NEXT:    j .LBB61_980
+; CHECK-RV32-NEXT:  .LBB61_495: # %else1858
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_496
+; CHECK-RV32-NEXT:    j .LBB61_981
+; CHECK-RV32-NEXT:  .LBB61_496: # %else1862
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_497
+; CHECK-RV32-NEXT:    j .LBB61_982
+; CHECK-RV32-NEXT:  .LBB61_497: # %else1866
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_498
+; CHECK-RV32-NEXT:    j .LBB61_983
+; CHECK-RV32-NEXT:  .LBB61_498: # %else1870
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_499
+; CHECK-RV32-NEXT:    j .LBB61_984
+; CHECK-RV32-NEXT:  .LBB61_499: # %else1874
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_500
+; CHECK-RV32-NEXT:    j .LBB61_985
+; CHECK-RV32-NEXT:  .LBB61_500: # %else1878
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_501
+; CHECK-RV32-NEXT:    j .LBB61_986
+; CHECK-RV32-NEXT:  .LBB61_501: # %else1882
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_502
+; CHECK-RV32-NEXT:    j .LBB61_987
+; CHECK-RV32-NEXT:  .LBB61_502: # %else1886
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_503
+; CHECK-RV32-NEXT:    j .LBB61_988
+; CHECK-RV32-NEXT:  .LBB61_503: # %else1890
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_504
+; CHECK-RV32-NEXT:    j .LBB61_989
+; CHECK-RV32-NEXT:  .LBB61_504: # %else1894
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_505
+; CHECK-RV32-NEXT:    j .LBB61_990
+; CHECK-RV32-NEXT:  .LBB61_505: # %else1898
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_506
+; CHECK-RV32-NEXT:    j .LBB61_991
+; CHECK-RV32-NEXT:  .LBB61_506: # %else1902
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_508
+; CHECK-RV32-NEXT:  .LBB61_507: # %cond.load1905
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 478
+; CHECK-RV32-NEXT:    li a4, 477
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_508: # %else1906
+; CHECK-RV32-NEXT:    slli a2, a3, 1
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vsrl.vx v16, v16, a1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_510
+; CHECK-RV32-NEXT:  # %bb.509: # %cond.load1909
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a1
+; CHECK-RV32-NEXT:    li a1, 479
+; CHECK-RV32-NEXT:    li a2, 478
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a2
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:  .LBB61_510: # %else1910
+; CHECK-RV32-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.x.s a1, v16
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_511
+; CHECK-RV32-NEXT:    j .LBB61_992
+; CHECK-RV32-NEXT:  .LBB61_511: # %else1914
+; CHECK-RV32-NEXT:    andi a2, a1, 1
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_512
+; CHECK-RV32-NEXT:    j .LBB61_993
+; CHECK-RV32-NEXT:  .LBB61_512: # %else1918
+; CHECK-RV32-NEXT:    andi a2, a1, 2
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_513
+; CHECK-RV32-NEXT:    j .LBB61_994
+; CHECK-RV32-NEXT:  .LBB61_513: # %else1922
+; CHECK-RV32-NEXT:    andi a2, a1, 4
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_514
+; CHECK-RV32-NEXT:    j .LBB61_995
+; CHECK-RV32-NEXT:  .LBB61_514: # %else1926
+; CHECK-RV32-NEXT:    andi a2, a1, 8
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_515
+; CHECK-RV32-NEXT:    j .LBB61_996
+; CHECK-RV32-NEXT:  .LBB61_515: # %else1930
+; CHECK-RV32-NEXT:    andi a2, a1, 16
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_516
+; CHECK-RV32-NEXT:    j .LBB61_997
+; CHECK-RV32-NEXT:  .LBB61_516: # %else1934
+; CHECK-RV32-NEXT:    andi a2, a1, 32
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_517
+; CHECK-RV32-NEXT:    j .LBB61_998
+; CHECK-RV32-NEXT:  .LBB61_517: # %else1938
+; CHECK-RV32-NEXT:    andi a2, a1, 64
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_518
+; CHECK-RV32-NEXT:    j .LBB61_999
+; CHECK-RV32-NEXT:  .LBB61_518: # %else1942
+; CHECK-RV32-NEXT:    andi a2, a1, 128
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_519
+; CHECK-RV32-NEXT:    j .LBB61_1000
+; CHECK-RV32-NEXT:  .LBB61_519: # %else1946
+; CHECK-RV32-NEXT:    andi a2, a1, 256
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_520
+; CHECK-RV32-NEXT:    j .LBB61_1001
+; CHECK-RV32-NEXT:  .LBB61_520: # %else1950
+; CHECK-RV32-NEXT:    andi a2, a1, 512
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_521
+; CHECK-RV32-NEXT:    j .LBB61_1002
+; CHECK-RV32-NEXT:  .LBB61_521: # %else1954
+; CHECK-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-RV32-NEXT:    beqz a2, .LBB61_522
+; CHECK-RV32-NEXT:    j .LBB61_1003
+; CHECK-RV32-NEXT:  .LBB61_522: # %else1958
+; CHECK-RV32-NEXT:    slli a2, a1, 20
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_523
+; CHECK-RV32-NEXT:    j .LBB61_1004
+; CHECK-RV32-NEXT:  .LBB61_523: # %else1962
+; CHECK-RV32-NEXT:    slli a2, a1, 19
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_524
+; CHECK-RV32-NEXT:    j .LBB61_1005
+; CHECK-RV32-NEXT:  .LBB61_524: # %else1966
+; CHECK-RV32-NEXT:    slli a2, a1, 18
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_525
+; CHECK-RV32-NEXT:    j .LBB61_1006
+; CHECK-RV32-NEXT:  .LBB61_525: # %else1970
+; CHECK-RV32-NEXT:    slli a2, a1, 17
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_526
+; CHECK-RV32-NEXT:    j .LBB61_1007
+; CHECK-RV32-NEXT:  .LBB61_526: # %else1974
+; CHECK-RV32-NEXT:    slli a2, a1, 16
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_527
+; CHECK-RV32-NEXT:    j .LBB61_1008
+; CHECK-RV32-NEXT:  .LBB61_527: # %else1978
+; CHECK-RV32-NEXT:    slli a2, a1, 15
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_528
+; CHECK-RV32-NEXT:    j .LBB61_1009
+; CHECK-RV32-NEXT:  .LBB61_528: # %else1982
+; CHECK-RV32-NEXT:    slli a2, a1, 14
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_529
+; CHECK-RV32-NEXT:    j .LBB61_1010
+; CHECK-RV32-NEXT:  .LBB61_529: # %else1986
+; CHECK-RV32-NEXT:    slli a2, a1, 13
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_530
+; CHECK-RV32-NEXT:    j .LBB61_1011
+; CHECK-RV32-NEXT:  .LBB61_530: # %else1990
+; CHECK-RV32-NEXT:    slli a2, a1, 12
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_531
+; CHECK-RV32-NEXT:    j .LBB61_1012
+; CHECK-RV32-NEXT:  .LBB61_531: # %else1994
+; CHECK-RV32-NEXT:    slli a2, a1, 11
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_532
+; CHECK-RV32-NEXT:    j .LBB61_1013
+; CHECK-RV32-NEXT:  .LBB61_532: # %else1998
+; CHECK-RV32-NEXT:    slli a2, a1, 10
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_533
+; CHECK-RV32-NEXT:    j .LBB61_1014
+; CHECK-RV32-NEXT:  .LBB61_533: # %else2002
+; CHECK-RV32-NEXT:    slli a2, a1, 9
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_534
+; CHECK-RV32-NEXT:    j .LBB61_1015
+; CHECK-RV32-NEXT:  .LBB61_534: # %else2006
+; CHECK-RV32-NEXT:    slli a2, a1, 8
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_535
+; CHECK-RV32-NEXT:    j .LBB61_1016
+; CHECK-RV32-NEXT:  .LBB61_535: # %else2010
+; CHECK-RV32-NEXT:    slli a2, a1, 7
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_536
+; CHECK-RV32-NEXT:    j .LBB61_1017
+; CHECK-RV32-NEXT:  .LBB61_536: # %else2014
+; CHECK-RV32-NEXT:    slli a2, a1, 6
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_537
+; CHECK-RV32-NEXT:    j .LBB61_1018
+; CHECK-RV32-NEXT:  .LBB61_537: # %else2018
+; CHECK-RV32-NEXT:    slli a2, a1, 5
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_538
+; CHECK-RV32-NEXT:    j .LBB61_1019
+; CHECK-RV32-NEXT:  .LBB61_538: # %else2022
+; CHECK-RV32-NEXT:    slli a2, a1, 4
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_539
+; CHECK-RV32-NEXT:    j .LBB61_1020
+; CHECK-RV32-NEXT:  .LBB61_539: # %else2026
+; CHECK-RV32-NEXT:    slli a2, a1, 3
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_540
+; CHECK-RV32-NEXT:    j .LBB61_1021
+; CHECK-RV32-NEXT:  .LBB61_540: # %else2030
+; CHECK-RV32-NEXT:    slli a2, a1, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_541
+; CHECK-RV32-NEXT:    j .LBB61_1022
+; CHECK-RV32-NEXT:  .LBB61_541: # %else2034
+; CHECK-RV32-NEXT:    slli a2, a1, 1
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_542
+; CHECK-RV32-NEXT:    j .LBB61_1023
+; CHECK-RV32-NEXT:  .LBB61_542: # %else2038
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_543
+; CHECK-RV32-NEXT:    j .LBB61_1024
+; CHECK-RV32-NEXT:  .LBB61_543: # %else2042
+; CHECK-RV32-NEXT:    ret
+; CHECK-RV32-NEXT:  .LBB61_544: # %cond.load
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v8, a1
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a1, a3, 2
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_545
+; CHECK-RV32-NEXT:    j .LBB61_2
+; CHECK-RV32-NEXT:  .LBB61_545: # %cond.load1
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 4
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_546
+; CHECK-RV32-NEXT:    j .LBB61_3
+; CHECK-RV32-NEXT:  .LBB61_546: # %cond.load5
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 8
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_547
+; CHECK-RV32-NEXT:    j .LBB61_4
+; CHECK-RV32-NEXT:  .LBB61_547: # %cond.load9
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 16
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_548
+; CHECK-RV32-NEXT:    j .LBB61_5
+; CHECK-RV32-NEXT:  .LBB61_548: # %cond.load13
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 32
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_549
+; CHECK-RV32-NEXT:    j .LBB61_6
+; CHECK-RV32-NEXT:  .LBB61_549: # %cond.load17
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 64
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_550
+; CHECK-RV32-NEXT:    j .LBB61_7
+; CHECK-RV32-NEXT:  .LBB61_550: # %cond.load21
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 128
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_551
+; CHECK-RV32-NEXT:    j .LBB61_8
+; CHECK-RV32-NEXT:  .LBB61_551: # %cond.load25
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 256
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_552
+; CHECK-RV32-NEXT:    j .LBB61_9
+; CHECK-RV32-NEXT:  .LBB61_552: # %cond.load29
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 512
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_553
+; CHECK-RV32-NEXT:    j .LBB61_10
+; CHECK-RV32-NEXT:  .LBB61_553: # %cond.load33
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a1, a3, 1024
+; CHECK-RV32-NEXT:    bnez a1, .LBB61_554
+; CHECK-RV32-NEXT:    j .LBB61_11
+; CHECK-RV32-NEXT:  .LBB61_554: # %cond.load37
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 20
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_555
+; CHECK-RV32-NEXT:    j .LBB61_12
+; CHECK-RV32-NEXT:  .LBB61_555: # %cond.load41
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 19
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_556
+; CHECK-RV32-NEXT:    j .LBB61_13
+; CHECK-RV32-NEXT:  .LBB61_556: # %cond.load45
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 18
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_557
+; CHECK-RV32-NEXT:    j .LBB61_14
+; CHECK-RV32-NEXT:  .LBB61_557: # %cond.load49
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 17
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_558
+; CHECK-RV32-NEXT:    j .LBB61_15
+; CHECK-RV32-NEXT:  .LBB61_558: # %cond.load53
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 16
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_559
+; CHECK-RV32-NEXT:    j .LBB61_16
+; CHECK-RV32-NEXT:  .LBB61_559: # %cond.load57
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 15
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_560
+; CHECK-RV32-NEXT:    j .LBB61_17
+; CHECK-RV32-NEXT:  .LBB61_560: # %cond.load61
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 14
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_561
+; CHECK-RV32-NEXT:    j .LBB61_18
+; CHECK-RV32-NEXT:  .LBB61_561: # %cond.load65
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 13
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_562
+; CHECK-RV32-NEXT:    j .LBB61_19
+; CHECK-RV32-NEXT:  .LBB61_562: # %cond.load69
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 12
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_563
+; CHECK-RV32-NEXT:    j .LBB61_20
+; CHECK-RV32-NEXT:  .LBB61_563: # %cond.load73
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 11
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_564
+; CHECK-RV32-NEXT:    j .LBB61_21
+; CHECK-RV32-NEXT:  .LBB61_564: # %cond.load77
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 10
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_565
+; CHECK-RV32-NEXT:    j .LBB61_22
+; CHECK-RV32-NEXT:  .LBB61_565: # %cond.load81
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 9
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_566
+; CHECK-RV32-NEXT:    j .LBB61_23
+; CHECK-RV32-NEXT:  .LBB61_566: # %cond.load85
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 8
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_567
+; CHECK-RV32-NEXT:    j .LBB61_24
+; CHECK-RV32-NEXT:  .LBB61_567: # %cond.load89
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 7
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_568
+; CHECK-RV32-NEXT:    j .LBB61_25
+; CHECK-RV32-NEXT:  .LBB61_568: # %cond.load93
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 6
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_569
+; CHECK-RV32-NEXT:    j .LBB61_26
+; CHECK-RV32-NEXT:  .LBB61_569: # %cond.load97
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 5
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_570
+; CHECK-RV32-NEXT:    j .LBB61_27
+; CHECK-RV32-NEXT:  .LBB61_570: # %cond.load101
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 4
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_571
+; CHECK-RV32-NEXT:    j .LBB61_28
+; CHECK-RV32-NEXT:  .LBB61_571: # %cond.load105
+; CHECK-RV32-NEXT:    lbu a1, 0(a0)
+; CHECK-RV32-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a1
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a1, a3, 3
+; CHECK-RV32-NEXT:    bgez a1, .LBB61_1025
+; CHECK-RV32-NEXT:    j .LBB61_29
+; CHECK-RV32-NEXT:  .LBB61_1025: # %cond.load105
+; CHECK-RV32-NEXT:    j .LBB61_30
+; CHECK-RV32-NEXT:  .LBB61_572: # %cond.load121
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_573
+; CHECK-RV32-NEXT:    j .LBB61_36
+; CHECK-RV32-NEXT:  .LBB61_573: # %cond.load125
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 33
+; CHECK-RV32-NEXT:    li a4, 32
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_574
+; CHECK-RV32-NEXT:    j .LBB61_37
+; CHECK-RV32-NEXT:  .LBB61_574: # %cond.load129
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 34
+; CHECK-RV32-NEXT:    li a4, 33
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_575
+; CHECK-RV32-NEXT:    j .LBB61_38
+; CHECK-RV32-NEXT:  .LBB61_575: # %cond.load133
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 35
+; CHECK-RV32-NEXT:    li a4, 34
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_576
+; CHECK-RV32-NEXT:    j .LBB61_39
+; CHECK-RV32-NEXT:  .LBB61_576: # %cond.load137
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 36
+; CHECK-RV32-NEXT:    li a4, 35
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_577
+; CHECK-RV32-NEXT:    j .LBB61_40
+; CHECK-RV32-NEXT:  .LBB61_577: # %cond.load141
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 37
+; CHECK-RV32-NEXT:    li a4, 36
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_578
+; CHECK-RV32-NEXT:    j .LBB61_41
+; CHECK-RV32-NEXT:  .LBB61_578: # %cond.load145
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 38
+; CHECK-RV32-NEXT:    li a4, 37
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_579
+; CHECK-RV32-NEXT:    j .LBB61_42
+; CHECK-RV32-NEXT:  .LBB61_579: # %cond.load149
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 39
+; CHECK-RV32-NEXT:    li a4, 38
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_580
+; CHECK-RV32-NEXT:    j .LBB61_43
+; CHECK-RV32-NEXT:  .LBB61_580: # %cond.load153
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 40
+; CHECK-RV32-NEXT:    li a4, 39
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_581
+; CHECK-RV32-NEXT:    j .LBB61_44
+; CHECK-RV32-NEXT:  .LBB61_581: # %cond.load157
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 41
+; CHECK-RV32-NEXT:    li a4, 40
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_582
+; CHECK-RV32-NEXT:    j .LBB61_45
+; CHECK-RV32-NEXT:  .LBB61_582: # %cond.load161
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 42
+; CHECK-RV32-NEXT:    li a4, 41
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_583
+; CHECK-RV32-NEXT:    j .LBB61_46
+; CHECK-RV32-NEXT:  .LBB61_583: # %cond.load165
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 43
+; CHECK-RV32-NEXT:    li a4, 42
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_584
+; CHECK-RV32-NEXT:    j .LBB61_47
+; CHECK-RV32-NEXT:  .LBB61_584: # %cond.load169
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 44
+; CHECK-RV32-NEXT:    li a4, 43
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_585
+; CHECK-RV32-NEXT:    j .LBB61_48
+; CHECK-RV32-NEXT:  .LBB61_585: # %cond.load173
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 45
+; CHECK-RV32-NEXT:    li a4, 44
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_586
+; CHECK-RV32-NEXT:    j .LBB61_49
+; CHECK-RV32-NEXT:  .LBB61_586: # %cond.load177
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 46
+; CHECK-RV32-NEXT:    li a4, 45
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_587
+; CHECK-RV32-NEXT:    j .LBB61_50
+; CHECK-RV32-NEXT:  .LBB61_587: # %cond.load181
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 47
+; CHECK-RV32-NEXT:    li a4, 46
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_588
+; CHECK-RV32-NEXT:    j .LBB61_51
+; CHECK-RV32-NEXT:  .LBB61_588: # %cond.load185
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 48
+; CHECK-RV32-NEXT:    li a4, 47
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_589
+; CHECK-RV32-NEXT:    j .LBB61_52
+; CHECK-RV32-NEXT:  .LBB61_589: # %cond.load189
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 49
+; CHECK-RV32-NEXT:    li a4, 48
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_590
+; CHECK-RV32-NEXT:    j .LBB61_53
+; CHECK-RV32-NEXT:  .LBB61_590: # %cond.load193
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 50
+; CHECK-RV32-NEXT:    li a4, 49
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_591
+; CHECK-RV32-NEXT:    j .LBB61_54
+; CHECK-RV32-NEXT:  .LBB61_591: # %cond.load197
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 51
+; CHECK-RV32-NEXT:    li a4, 50
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_592
+; CHECK-RV32-NEXT:    j .LBB61_55
+; CHECK-RV32-NEXT:  .LBB61_592: # %cond.load201
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 52
+; CHECK-RV32-NEXT:    li a4, 51
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_593
+; CHECK-RV32-NEXT:    j .LBB61_56
+; CHECK-RV32-NEXT:  .LBB61_593: # %cond.load205
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 53
+; CHECK-RV32-NEXT:    li a4, 52
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_594
+; CHECK-RV32-NEXT:    j .LBB61_57
+; CHECK-RV32-NEXT:  .LBB61_594: # %cond.load209
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 54
+; CHECK-RV32-NEXT:    li a4, 53
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_595
+; CHECK-RV32-NEXT:    j .LBB61_58
+; CHECK-RV32-NEXT:  .LBB61_595: # %cond.load213
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 55
+; CHECK-RV32-NEXT:    li a4, 54
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_596
+; CHECK-RV32-NEXT:    j .LBB61_59
+; CHECK-RV32-NEXT:  .LBB61_596: # %cond.load217
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 56
+; CHECK-RV32-NEXT:    li a4, 55
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_597
+; CHECK-RV32-NEXT:    j .LBB61_60
+; CHECK-RV32-NEXT:  .LBB61_597: # %cond.load221
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 57
+; CHECK-RV32-NEXT:    li a4, 56
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_598
+; CHECK-RV32-NEXT:    j .LBB61_61
+; CHECK-RV32-NEXT:  .LBB61_598: # %cond.load225
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 58
+; CHECK-RV32-NEXT:    li a4, 57
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_599
+; CHECK-RV32-NEXT:    j .LBB61_62
+; CHECK-RV32-NEXT:  .LBB61_599: # %cond.load229
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 59
+; CHECK-RV32-NEXT:    li a4, 58
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_600
+; CHECK-RV32-NEXT:    j .LBB61_63
+; CHECK-RV32-NEXT:  .LBB61_600: # %cond.load233
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 60
+; CHECK-RV32-NEXT:    li a4, 59
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_601
+; CHECK-RV32-NEXT:    j .LBB61_64
+; CHECK-RV32-NEXT:  .LBB61_601: # %cond.load237
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 61
+; CHECK-RV32-NEXT:    li a4, 60
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1026
+; CHECK-RV32-NEXT:    j .LBB61_65
+; CHECK-RV32-NEXT:  .LBB61_1026: # %cond.load237
+; CHECK-RV32-NEXT:    j .LBB61_66
+; CHECK-RV32-NEXT:  .LBB61_602: # %cond.load249
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v17, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 64
+; CHECK-RV32-NEXT:    li a4, 63
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v17, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv1r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_603
+; CHECK-RV32-NEXT:    j .LBB61_70
+; CHECK-RV32-NEXT:  .LBB61_603: # %cond.load253
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 65
+; CHECK-RV32-NEXT:    li a4, 64
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_604
+; CHECK-RV32-NEXT:    j .LBB61_71
+; CHECK-RV32-NEXT:  .LBB61_604: # %cond.load257
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 66
+; CHECK-RV32-NEXT:    li a4, 65
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_605
+; CHECK-RV32-NEXT:    j .LBB61_72
+; CHECK-RV32-NEXT:  .LBB61_605: # %cond.load261
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 67
+; CHECK-RV32-NEXT:    li a4, 66
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_606
+; CHECK-RV32-NEXT:    j .LBB61_73
+; CHECK-RV32-NEXT:  .LBB61_606: # %cond.load265
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 68
+; CHECK-RV32-NEXT:    li a4, 67
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_607
+; CHECK-RV32-NEXT:    j .LBB61_74
+; CHECK-RV32-NEXT:  .LBB61_607: # %cond.load269
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 69
+; CHECK-RV32-NEXT:    li a4, 68
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_608
+; CHECK-RV32-NEXT:    j .LBB61_75
+; CHECK-RV32-NEXT:  .LBB61_608: # %cond.load273
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 70
+; CHECK-RV32-NEXT:    li a4, 69
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_609
+; CHECK-RV32-NEXT:    j .LBB61_76
+; CHECK-RV32-NEXT:  .LBB61_609: # %cond.load277
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 71
+; CHECK-RV32-NEXT:    li a4, 70
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_610
+; CHECK-RV32-NEXT:    j .LBB61_77
+; CHECK-RV32-NEXT:  .LBB61_610: # %cond.load281
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 72
+; CHECK-RV32-NEXT:    li a4, 71
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_611
+; CHECK-RV32-NEXT:    j .LBB61_78
+; CHECK-RV32-NEXT:  .LBB61_611: # %cond.load285
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 73
+; CHECK-RV32-NEXT:    li a4, 72
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_612
+; CHECK-RV32-NEXT:    j .LBB61_79
+; CHECK-RV32-NEXT:  .LBB61_612: # %cond.load289
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 74
+; CHECK-RV32-NEXT:    li a4, 73
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_613
+; CHECK-RV32-NEXT:    j .LBB61_80
+; CHECK-RV32-NEXT:  .LBB61_613: # %cond.load293
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 75
+; CHECK-RV32-NEXT:    li a4, 74
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_614
+; CHECK-RV32-NEXT:    j .LBB61_81
+; CHECK-RV32-NEXT:  .LBB61_614: # %cond.load297
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 76
+; CHECK-RV32-NEXT:    li a4, 75
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_615
+; CHECK-RV32-NEXT:    j .LBB61_82
+; CHECK-RV32-NEXT:  .LBB61_615: # %cond.load301
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 77
+; CHECK-RV32-NEXT:    li a4, 76
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_616
+; CHECK-RV32-NEXT:    j .LBB61_83
+; CHECK-RV32-NEXT:  .LBB61_616: # %cond.load305
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 78
+; CHECK-RV32-NEXT:    li a4, 77
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_617
+; CHECK-RV32-NEXT:    j .LBB61_84
+; CHECK-RV32-NEXT:  .LBB61_617: # %cond.load309
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 79
+; CHECK-RV32-NEXT:    li a4, 78
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_618
+; CHECK-RV32-NEXT:    j .LBB61_85
+; CHECK-RV32-NEXT:  .LBB61_618: # %cond.load313
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 80
+; CHECK-RV32-NEXT:    li a4, 79
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_619
+; CHECK-RV32-NEXT:    j .LBB61_86
+; CHECK-RV32-NEXT:  .LBB61_619: # %cond.load317
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 81
+; CHECK-RV32-NEXT:    li a4, 80
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_620
+; CHECK-RV32-NEXT:    j .LBB61_87
+; CHECK-RV32-NEXT:  .LBB61_620: # %cond.load321
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 82
+; CHECK-RV32-NEXT:    li a4, 81
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_621
+; CHECK-RV32-NEXT:    j .LBB61_88
+; CHECK-RV32-NEXT:  .LBB61_621: # %cond.load325
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 83
+; CHECK-RV32-NEXT:    li a4, 82
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_622
+; CHECK-RV32-NEXT:    j .LBB61_89
+; CHECK-RV32-NEXT:  .LBB61_622: # %cond.load329
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 84
+; CHECK-RV32-NEXT:    li a4, 83
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_623
+; CHECK-RV32-NEXT:    j .LBB61_90
+; CHECK-RV32-NEXT:  .LBB61_623: # %cond.load333
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 85
+; CHECK-RV32-NEXT:    li a4, 84
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_624
+; CHECK-RV32-NEXT:    j .LBB61_91
+; CHECK-RV32-NEXT:  .LBB61_624: # %cond.load337
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 86
+; CHECK-RV32-NEXT:    li a4, 85
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_625
+; CHECK-RV32-NEXT:    j .LBB61_92
+; CHECK-RV32-NEXT:  .LBB61_625: # %cond.load341
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 87
+; CHECK-RV32-NEXT:    li a4, 86
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_626
+; CHECK-RV32-NEXT:    j .LBB61_93
+; CHECK-RV32-NEXT:  .LBB61_626: # %cond.load345
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 88
+; CHECK-RV32-NEXT:    li a4, 87
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_627
+; CHECK-RV32-NEXT:    j .LBB61_94
+; CHECK-RV32-NEXT:  .LBB61_627: # %cond.load349
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 89
+; CHECK-RV32-NEXT:    li a4, 88
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_628
+; CHECK-RV32-NEXT:    j .LBB61_95
+; CHECK-RV32-NEXT:  .LBB61_628: # %cond.load353
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 90
+; CHECK-RV32-NEXT:    li a4, 89
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_629
+; CHECK-RV32-NEXT:    j .LBB61_96
+; CHECK-RV32-NEXT:  .LBB61_629: # %cond.load357
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 91
+; CHECK-RV32-NEXT:    li a4, 90
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_630
+; CHECK-RV32-NEXT:    j .LBB61_97
+; CHECK-RV32-NEXT:  .LBB61_630: # %cond.load361
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 92
+; CHECK-RV32-NEXT:    li a4, 91
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_631
+; CHECK-RV32-NEXT:    j .LBB61_98
+; CHECK-RV32-NEXT:  .LBB61_631: # %cond.load365
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 93
+; CHECK-RV32-NEXT:    li a4, 92
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1027
+; CHECK-RV32-NEXT:    j .LBB61_99
+; CHECK-RV32-NEXT:  .LBB61_1027: # %cond.load365
+; CHECK-RV32-NEXT:    j .LBB61_100
+; CHECK-RV32-NEXT:  .LBB61_632: # %cond.load377
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 96
+; CHECK-RV32-NEXT:    li a4, 95
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_633
+; CHECK-RV32-NEXT:    j .LBB61_104
+; CHECK-RV32-NEXT:  .LBB61_633: # %cond.load381
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 97
+; CHECK-RV32-NEXT:    li a4, 96
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_634
+; CHECK-RV32-NEXT:    j .LBB61_105
+; CHECK-RV32-NEXT:  .LBB61_634: # %cond.load385
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 98
+; CHECK-RV32-NEXT:    li a4, 97
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_635
+; CHECK-RV32-NEXT:    j .LBB61_106
+; CHECK-RV32-NEXT:  .LBB61_635: # %cond.load389
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 99
+; CHECK-RV32-NEXT:    li a4, 98
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_636
+; CHECK-RV32-NEXT:    j .LBB61_107
+; CHECK-RV32-NEXT:  .LBB61_636: # %cond.load393
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 100
+; CHECK-RV32-NEXT:    li a4, 99
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_637
+; CHECK-RV32-NEXT:    j .LBB61_108
+; CHECK-RV32-NEXT:  .LBB61_637: # %cond.load397
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 101
+; CHECK-RV32-NEXT:    li a4, 100
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_638
+; CHECK-RV32-NEXT:    j .LBB61_109
+; CHECK-RV32-NEXT:  .LBB61_638: # %cond.load401
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 102
+; CHECK-RV32-NEXT:    li a4, 101
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_639
+; CHECK-RV32-NEXT:    j .LBB61_110
+; CHECK-RV32-NEXT:  .LBB61_639: # %cond.load405
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 103
+; CHECK-RV32-NEXT:    li a4, 102
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_640
+; CHECK-RV32-NEXT:    j .LBB61_111
+; CHECK-RV32-NEXT:  .LBB61_640: # %cond.load409
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 104
+; CHECK-RV32-NEXT:    li a4, 103
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_641
+; CHECK-RV32-NEXT:    j .LBB61_112
+; CHECK-RV32-NEXT:  .LBB61_641: # %cond.load413
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 105
+; CHECK-RV32-NEXT:    li a4, 104
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_642
+; CHECK-RV32-NEXT:    j .LBB61_113
+; CHECK-RV32-NEXT:  .LBB61_642: # %cond.load417
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 106
+; CHECK-RV32-NEXT:    li a4, 105
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_643
+; CHECK-RV32-NEXT:    j .LBB61_114
+; CHECK-RV32-NEXT:  .LBB61_643: # %cond.load421
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 107
+; CHECK-RV32-NEXT:    li a4, 106
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_644
+; CHECK-RV32-NEXT:    j .LBB61_115
+; CHECK-RV32-NEXT:  .LBB61_644: # %cond.load425
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 108
+; CHECK-RV32-NEXT:    li a4, 107
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_645
+; CHECK-RV32-NEXT:    j .LBB61_116
+; CHECK-RV32-NEXT:  .LBB61_645: # %cond.load429
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 109
+; CHECK-RV32-NEXT:    li a4, 108
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_646
+; CHECK-RV32-NEXT:    j .LBB61_117
+; CHECK-RV32-NEXT:  .LBB61_646: # %cond.load433
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 110
+; CHECK-RV32-NEXT:    li a4, 109
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_647
+; CHECK-RV32-NEXT:    j .LBB61_118
+; CHECK-RV32-NEXT:  .LBB61_647: # %cond.load437
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 111
+; CHECK-RV32-NEXT:    li a4, 110
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_648
+; CHECK-RV32-NEXT:    j .LBB61_119
+; CHECK-RV32-NEXT:  .LBB61_648: # %cond.load441
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 112
+; CHECK-RV32-NEXT:    li a4, 111
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_649
+; CHECK-RV32-NEXT:    j .LBB61_120
+; CHECK-RV32-NEXT:  .LBB61_649: # %cond.load445
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 113
+; CHECK-RV32-NEXT:    li a4, 112
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_650
+; CHECK-RV32-NEXT:    j .LBB61_121
+; CHECK-RV32-NEXT:  .LBB61_650: # %cond.load449
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 114
+; CHECK-RV32-NEXT:    li a4, 113
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_651
+; CHECK-RV32-NEXT:    j .LBB61_122
+; CHECK-RV32-NEXT:  .LBB61_651: # %cond.load453
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 115
+; CHECK-RV32-NEXT:    li a4, 114
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_652
+; CHECK-RV32-NEXT:    j .LBB61_123
+; CHECK-RV32-NEXT:  .LBB61_652: # %cond.load457
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 116
+; CHECK-RV32-NEXT:    li a4, 115
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_653
+; CHECK-RV32-NEXT:    j .LBB61_124
+; CHECK-RV32-NEXT:  .LBB61_653: # %cond.load461
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 117
+; CHECK-RV32-NEXT:    li a4, 116
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_654
+; CHECK-RV32-NEXT:    j .LBB61_125
+; CHECK-RV32-NEXT:  .LBB61_654: # %cond.load465
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 118
+; CHECK-RV32-NEXT:    li a4, 117
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_655
+; CHECK-RV32-NEXT:    j .LBB61_126
+; CHECK-RV32-NEXT:  .LBB61_655: # %cond.load469
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 119
+; CHECK-RV32-NEXT:    li a4, 118
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_656
+; CHECK-RV32-NEXT:    j .LBB61_127
+; CHECK-RV32-NEXT:  .LBB61_656: # %cond.load473
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 120
+; CHECK-RV32-NEXT:    li a4, 119
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_657
+; CHECK-RV32-NEXT:    j .LBB61_128
+; CHECK-RV32-NEXT:  .LBB61_657: # %cond.load477
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 121
+; CHECK-RV32-NEXT:    li a4, 120
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_658
+; CHECK-RV32-NEXT:    j .LBB61_129
+; CHECK-RV32-NEXT:  .LBB61_658: # %cond.load481
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 122
+; CHECK-RV32-NEXT:    li a4, 121
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_659
+; CHECK-RV32-NEXT:    j .LBB61_130
+; CHECK-RV32-NEXT:  .LBB61_659: # %cond.load485
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 123
+; CHECK-RV32-NEXT:    li a4, 122
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_660
+; CHECK-RV32-NEXT:    j .LBB61_131
+; CHECK-RV32-NEXT:  .LBB61_660: # %cond.load489
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 124
+; CHECK-RV32-NEXT:    li a4, 123
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_661
+; CHECK-RV32-NEXT:    j .LBB61_132
+; CHECK-RV32-NEXT:  .LBB61_661: # %cond.load493
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a3
+; CHECK-RV32-NEXT:    vmv8r.v v16, v8
+; CHECK-RV32-NEXT:    li a3, 125
+; CHECK-RV32-NEXT:    li a4, 124
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v16, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v16
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1028
+; CHECK-RV32-NEXT:    j .LBB61_133
+; CHECK-RV32-NEXT:  .LBB61_1028: # %cond.load493
+; CHECK-RV32-NEXT:    j .LBB61_134
+; CHECK-RV32-NEXT:  .LBB61_662: # %cond.load505
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v18, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 128
+; CHECK-RV32-NEXT:    li a4, 127
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v18, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv2r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_663
+; CHECK-RV32-NEXT:    j .LBB61_138
+; CHECK-RV32-NEXT:  .LBB61_663: # %cond.load509
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 129
+; CHECK-RV32-NEXT:    li a4, 128
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_664
+; CHECK-RV32-NEXT:    j .LBB61_139
+; CHECK-RV32-NEXT:  .LBB61_664: # %cond.load513
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 130
+; CHECK-RV32-NEXT:    li a4, 129
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_665
+; CHECK-RV32-NEXT:    j .LBB61_140
+; CHECK-RV32-NEXT:  .LBB61_665: # %cond.load517
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 131
+; CHECK-RV32-NEXT:    li a4, 130
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_666
+; CHECK-RV32-NEXT:    j .LBB61_141
+; CHECK-RV32-NEXT:  .LBB61_666: # %cond.load521
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 132
+; CHECK-RV32-NEXT:    li a4, 131
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_667
+; CHECK-RV32-NEXT:    j .LBB61_142
+; CHECK-RV32-NEXT:  .LBB61_667: # %cond.load525
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 133
+; CHECK-RV32-NEXT:    li a4, 132
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_668
+; CHECK-RV32-NEXT:    j .LBB61_143
+; CHECK-RV32-NEXT:  .LBB61_668: # %cond.load529
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 134
+; CHECK-RV32-NEXT:    li a4, 133
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_669
+; CHECK-RV32-NEXT:    j .LBB61_144
+; CHECK-RV32-NEXT:  .LBB61_669: # %cond.load533
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 135
+; CHECK-RV32-NEXT:    li a4, 134
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_670
+; CHECK-RV32-NEXT:    j .LBB61_145
+; CHECK-RV32-NEXT:  .LBB61_670: # %cond.load537
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 136
+; CHECK-RV32-NEXT:    li a4, 135
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_671
+; CHECK-RV32-NEXT:    j .LBB61_146
+; CHECK-RV32-NEXT:  .LBB61_671: # %cond.load541
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 137
+; CHECK-RV32-NEXT:    li a4, 136
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_672
+; CHECK-RV32-NEXT:    j .LBB61_147
+; CHECK-RV32-NEXT:  .LBB61_672: # %cond.load545
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 138
+; CHECK-RV32-NEXT:    li a4, 137
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_673
+; CHECK-RV32-NEXT:    j .LBB61_148
+; CHECK-RV32-NEXT:  .LBB61_673: # %cond.load549
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 139
+; CHECK-RV32-NEXT:    li a4, 138
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_674
+; CHECK-RV32-NEXT:    j .LBB61_149
+; CHECK-RV32-NEXT:  .LBB61_674: # %cond.load553
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 140
+; CHECK-RV32-NEXT:    li a4, 139
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_675
+; CHECK-RV32-NEXT:    j .LBB61_150
+; CHECK-RV32-NEXT:  .LBB61_675: # %cond.load557
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 141
+; CHECK-RV32-NEXT:    li a4, 140
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_676
+; CHECK-RV32-NEXT:    j .LBB61_151
+; CHECK-RV32-NEXT:  .LBB61_676: # %cond.load561
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 142
+; CHECK-RV32-NEXT:    li a4, 141
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_677
+; CHECK-RV32-NEXT:    j .LBB61_152
+; CHECK-RV32-NEXT:  .LBB61_677: # %cond.load565
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 143
+; CHECK-RV32-NEXT:    li a4, 142
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_678
+; CHECK-RV32-NEXT:    j .LBB61_153
+; CHECK-RV32-NEXT:  .LBB61_678: # %cond.load569
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 144
+; CHECK-RV32-NEXT:    li a4, 143
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_679
+; CHECK-RV32-NEXT:    j .LBB61_154
+; CHECK-RV32-NEXT:  .LBB61_679: # %cond.load573
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 145
+; CHECK-RV32-NEXT:    li a4, 144
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_680
+; CHECK-RV32-NEXT:    j .LBB61_155
+; CHECK-RV32-NEXT:  .LBB61_680: # %cond.load577
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 146
+; CHECK-RV32-NEXT:    li a4, 145
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_681
+; CHECK-RV32-NEXT:    j .LBB61_156
+; CHECK-RV32-NEXT:  .LBB61_681: # %cond.load581
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 147
+; CHECK-RV32-NEXT:    li a4, 146
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_682
+; CHECK-RV32-NEXT:    j .LBB61_157
+; CHECK-RV32-NEXT:  .LBB61_682: # %cond.load585
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 148
+; CHECK-RV32-NEXT:    li a4, 147
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_683
+; CHECK-RV32-NEXT:    j .LBB61_158
+; CHECK-RV32-NEXT:  .LBB61_683: # %cond.load589
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 149
+; CHECK-RV32-NEXT:    li a4, 148
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_684
+; CHECK-RV32-NEXT:    j .LBB61_159
+; CHECK-RV32-NEXT:  .LBB61_684: # %cond.load593
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 150
+; CHECK-RV32-NEXT:    li a4, 149
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_685
+; CHECK-RV32-NEXT:    j .LBB61_160
+; CHECK-RV32-NEXT:  .LBB61_685: # %cond.load597
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 151
+; CHECK-RV32-NEXT:    li a4, 150
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_686
+; CHECK-RV32-NEXT:    j .LBB61_161
+; CHECK-RV32-NEXT:  .LBB61_686: # %cond.load601
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 152
+; CHECK-RV32-NEXT:    li a4, 151
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_687
+; CHECK-RV32-NEXT:    j .LBB61_162
+; CHECK-RV32-NEXT:  .LBB61_687: # %cond.load605
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 153
+; CHECK-RV32-NEXT:    li a4, 152
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_688
+; CHECK-RV32-NEXT:    j .LBB61_163
+; CHECK-RV32-NEXT:  .LBB61_688: # %cond.load609
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 154
+; CHECK-RV32-NEXT:    li a4, 153
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_689
+; CHECK-RV32-NEXT:    j .LBB61_164
+; CHECK-RV32-NEXT:  .LBB61_689: # %cond.load613
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 155
+; CHECK-RV32-NEXT:    li a4, 154
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_690
+; CHECK-RV32-NEXT:    j .LBB61_165
+; CHECK-RV32-NEXT:  .LBB61_690: # %cond.load617
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 156
+; CHECK-RV32-NEXT:    li a4, 155
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_691
+; CHECK-RV32-NEXT:    j .LBB61_166
+; CHECK-RV32-NEXT:  .LBB61_691: # %cond.load621
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 157
+; CHECK-RV32-NEXT:    li a4, 156
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1029
+; CHECK-RV32-NEXT:    j .LBB61_167
+; CHECK-RV32-NEXT:  .LBB61_1029: # %cond.load621
+; CHECK-RV32-NEXT:    j .LBB61_168
+; CHECK-RV32-NEXT:  .LBB61_692: # %cond.load633
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 160
+; CHECK-RV32-NEXT:    li a4, 159
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_693
+; CHECK-RV32-NEXT:    j .LBB61_172
+; CHECK-RV32-NEXT:  .LBB61_693: # %cond.load637
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 161
+; CHECK-RV32-NEXT:    li a4, 160
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_694
+; CHECK-RV32-NEXT:    j .LBB61_173
+; CHECK-RV32-NEXT:  .LBB61_694: # %cond.load641
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 162
+; CHECK-RV32-NEXT:    li a4, 161
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_695
+; CHECK-RV32-NEXT:    j .LBB61_174
+; CHECK-RV32-NEXT:  .LBB61_695: # %cond.load645
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 163
+; CHECK-RV32-NEXT:    li a4, 162
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_696
+; CHECK-RV32-NEXT:    j .LBB61_175
+; CHECK-RV32-NEXT:  .LBB61_696: # %cond.load649
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 164
+; CHECK-RV32-NEXT:    li a4, 163
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_697
+; CHECK-RV32-NEXT:    j .LBB61_176
+; CHECK-RV32-NEXT:  .LBB61_697: # %cond.load653
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 165
+; CHECK-RV32-NEXT:    li a4, 164
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_698
+; CHECK-RV32-NEXT:    j .LBB61_177
+; CHECK-RV32-NEXT:  .LBB61_698: # %cond.load657
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 166
+; CHECK-RV32-NEXT:    li a4, 165
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_699
+; CHECK-RV32-NEXT:    j .LBB61_178
+; CHECK-RV32-NEXT:  .LBB61_699: # %cond.load661
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 167
+; CHECK-RV32-NEXT:    li a4, 166
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_700
+; CHECK-RV32-NEXT:    j .LBB61_179
+; CHECK-RV32-NEXT:  .LBB61_700: # %cond.load665
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 168
+; CHECK-RV32-NEXT:    li a4, 167
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_701
+; CHECK-RV32-NEXT:    j .LBB61_180
+; CHECK-RV32-NEXT:  .LBB61_701: # %cond.load669
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 169
+; CHECK-RV32-NEXT:    li a4, 168
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_702
+; CHECK-RV32-NEXT:    j .LBB61_181
+; CHECK-RV32-NEXT:  .LBB61_702: # %cond.load673
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 170
+; CHECK-RV32-NEXT:    li a4, 169
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_703
+; CHECK-RV32-NEXT:    j .LBB61_182
+; CHECK-RV32-NEXT:  .LBB61_703: # %cond.load677
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 171
+; CHECK-RV32-NEXT:    li a4, 170
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_704
+; CHECK-RV32-NEXT:    j .LBB61_183
+; CHECK-RV32-NEXT:  .LBB61_704: # %cond.load681
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 172
+; CHECK-RV32-NEXT:    li a4, 171
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_705
+; CHECK-RV32-NEXT:    j .LBB61_184
+; CHECK-RV32-NEXT:  .LBB61_705: # %cond.load685
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 173
+; CHECK-RV32-NEXT:    li a4, 172
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_706
+; CHECK-RV32-NEXT:    j .LBB61_185
+; CHECK-RV32-NEXT:  .LBB61_706: # %cond.load689
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 174
+; CHECK-RV32-NEXT:    li a4, 173
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_707
+; CHECK-RV32-NEXT:    j .LBB61_186
+; CHECK-RV32-NEXT:  .LBB61_707: # %cond.load693
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 175
+; CHECK-RV32-NEXT:    li a4, 174
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_708
+; CHECK-RV32-NEXT:    j .LBB61_187
+; CHECK-RV32-NEXT:  .LBB61_708: # %cond.load697
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 176
+; CHECK-RV32-NEXT:    li a4, 175
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_709
+; CHECK-RV32-NEXT:    j .LBB61_188
+; CHECK-RV32-NEXT:  .LBB61_709: # %cond.load701
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 177
+; CHECK-RV32-NEXT:    li a4, 176
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_710
+; CHECK-RV32-NEXT:    j .LBB61_189
+; CHECK-RV32-NEXT:  .LBB61_710: # %cond.load705
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 178
+; CHECK-RV32-NEXT:    li a4, 177
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_711
+; CHECK-RV32-NEXT:    j .LBB61_190
+; CHECK-RV32-NEXT:  .LBB61_711: # %cond.load709
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 179
+; CHECK-RV32-NEXT:    li a4, 178
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_712
+; CHECK-RV32-NEXT:    j .LBB61_191
+; CHECK-RV32-NEXT:  .LBB61_712: # %cond.load713
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 180
+; CHECK-RV32-NEXT:    li a4, 179
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_713
+; CHECK-RV32-NEXT:    j .LBB61_192
+; CHECK-RV32-NEXT:  .LBB61_713: # %cond.load717
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 181
+; CHECK-RV32-NEXT:    li a4, 180
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_714
+; CHECK-RV32-NEXT:    j .LBB61_193
+; CHECK-RV32-NEXT:  .LBB61_714: # %cond.load721
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 182
+; CHECK-RV32-NEXT:    li a4, 181
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_715
+; CHECK-RV32-NEXT:    j .LBB61_194
+; CHECK-RV32-NEXT:  .LBB61_715: # %cond.load725
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 183
+; CHECK-RV32-NEXT:    li a4, 182
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_716
+; CHECK-RV32-NEXT:    j .LBB61_195
+; CHECK-RV32-NEXT:  .LBB61_716: # %cond.load729
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 184
+; CHECK-RV32-NEXT:    li a4, 183
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_717
+; CHECK-RV32-NEXT:    j .LBB61_196
+; CHECK-RV32-NEXT:  .LBB61_717: # %cond.load733
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 185
+; CHECK-RV32-NEXT:    li a4, 184
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_718
+; CHECK-RV32-NEXT:    j .LBB61_197
+; CHECK-RV32-NEXT:  .LBB61_718: # %cond.load737
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 186
+; CHECK-RV32-NEXT:    li a4, 185
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_719
+; CHECK-RV32-NEXT:    j .LBB61_198
+; CHECK-RV32-NEXT:  .LBB61_719: # %cond.load741
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 187
+; CHECK-RV32-NEXT:    li a4, 186
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_720
+; CHECK-RV32-NEXT:    j .LBB61_199
+; CHECK-RV32-NEXT:  .LBB61_720: # %cond.load745
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 188
+; CHECK-RV32-NEXT:    li a4, 187
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_721
+; CHECK-RV32-NEXT:    j .LBB61_200
+; CHECK-RV32-NEXT:  .LBB61_721: # %cond.load749
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 189
+; CHECK-RV32-NEXT:    li a4, 188
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1030
+; CHECK-RV32-NEXT:    j .LBB61_201
+; CHECK-RV32-NEXT:  .LBB61_1030: # %cond.load749
+; CHECK-RV32-NEXT:    j .LBB61_202
+; CHECK-RV32-NEXT:  .LBB61_722: # %cond.load761
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 192
+; CHECK-RV32-NEXT:    li a4, 191
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_723
+; CHECK-RV32-NEXT:    j .LBB61_206
+; CHECK-RV32-NEXT:  .LBB61_723: # %cond.load765
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 193
+; CHECK-RV32-NEXT:    li a4, 192
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_724
+; CHECK-RV32-NEXT:    j .LBB61_207
+; CHECK-RV32-NEXT:  .LBB61_724: # %cond.load769
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 194
+; CHECK-RV32-NEXT:    li a4, 193
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_725
+; CHECK-RV32-NEXT:    j .LBB61_208
+; CHECK-RV32-NEXT:  .LBB61_725: # %cond.load773
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 195
+; CHECK-RV32-NEXT:    li a4, 194
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_726
+; CHECK-RV32-NEXT:    j .LBB61_209
+; CHECK-RV32-NEXT:  .LBB61_726: # %cond.load777
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 196
+; CHECK-RV32-NEXT:    li a4, 195
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_727
+; CHECK-RV32-NEXT:    j .LBB61_210
+; CHECK-RV32-NEXT:  .LBB61_727: # %cond.load781
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 197
+; CHECK-RV32-NEXT:    li a4, 196
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_728
+; CHECK-RV32-NEXT:    j .LBB61_211
+; CHECK-RV32-NEXT:  .LBB61_728: # %cond.load785
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 198
+; CHECK-RV32-NEXT:    li a4, 197
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_729
+; CHECK-RV32-NEXT:    j .LBB61_212
+; CHECK-RV32-NEXT:  .LBB61_729: # %cond.load789
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 199
+; CHECK-RV32-NEXT:    li a4, 198
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_730
+; CHECK-RV32-NEXT:    j .LBB61_213
+; CHECK-RV32-NEXT:  .LBB61_730: # %cond.load793
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 200
+; CHECK-RV32-NEXT:    li a4, 199
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_731
+; CHECK-RV32-NEXT:    j .LBB61_214
+; CHECK-RV32-NEXT:  .LBB61_731: # %cond.load797
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 201
+; CHECK-RV32-NEXT:    li a4, 200
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_732
+; CHECK-RV32-NEXT:    j .LBB61_215
+; CHECK-RV32-NEXT:  .LBB61_732: # %cond.load801
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 202
+; CHECK-RV32-NEXT:    li a4, 201
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_733
+; CHECK-RV32-NEXT:    j .LBB61_216
+; CHECK-RV32-NEXT:  .LBB61_733: # %cond.load805
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 203
+; CHECK-RV32-NEXT:    li a4, 202
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_734
+; CHECK-RV32-NEXT:    j .LBB61_217
+; CHECK-RV32-NEXT:  .LBB61_734: # %cond.load809
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 204
+; CHECK-RV32-NEXT:    li a4, 203
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_735
+; CHECK-RV32-NEXT:    j .LBB61_218
+; CHECK-RV32-NEXT:  .LBB61_735: # %cond.load813
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 205
+; CHECK-RV32-NEXT:    li a4, 204
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_736
+; CHECK-RV32-NEXT:    j .LBB61_219
+; CHECK-RV32-NEXT:  .LBB61_736: # %cond.load817
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 206
+; CHECK-RV32-NEXT:    li a4, 205
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_737
+; CHECK-RV32-NEXT:    j .LBB61_220
+; CHECK-RV32-NEXT:  .LBB61_737: # %cond.load821
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 207
+; CHECK-RV32-NEXT:    li a4, 206
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_738
+; CHECK-RV32-NEXT:    j .LBB61_221
+; CHECK-RV32-NEXT:  .LBB61_738: # %cond.load825
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 208
+; CHECK-RV32-NEXT:    li a4, 207
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_739
+; CHECK-RV32-NEXT:    j .LBB61_222
+; CHECK-RV32-NEXT:  .LBB61_739: # %cond.load829
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 209
+; CHECK-RV32-NEXT:    li a4, 208
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_740
+; CHECK-RV32-NEXT:    j .LBB61_223
+; CHECK-RV32-NEXT:  .LBB61_740: # %cond.load833
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 210
+; CHECK-RV32-NEXT:    li a4, 209
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_741
+; CHECK-RV32-NEXT:    j .LBB61_224
+; CHECK-RV32-NEXT:  .LBB61_741: # %cond.load837
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 211
+; CHECK-RV32-NEXT:    li a4, 210
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_742
+; CHECK-RV32-NEXT:    j .LBB61_225
+; CHECK-RV32-NEXT:  .LBB61_742: # %cond.load841
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 212
+; CHECK-RV32-NEXT:    li a4, 211
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_743
+; CHECK-RV32-NEXT:    j .LBB61_226
+; CHECK-RV32-NEXT:  .LBB61_743: # %cond.load845
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 213
+; CHECK-RV32-NEXT:    li a4, 212
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_744
+; CHECK-RV32-NEXT:    j .LBB61_227
+; CHECK-RV32-NEXT:  .LBB61_744: # %cond.load849
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 214
+; CHECK-RV32-NEXT:    li a4, 213
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_745
+; CHECK-RV32-NEXT:    j .LBB61_228
+; CHECK-RV32-NEXT:  .LBB61_745: # %cond.load853
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 215
+; CHECK-RV32-NEXT:    li a4, 214
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_746
+; CHECK-RV32-NEXT:    j .LBB61_229
+; CHECK-RV32-NEXT:  .LBB61_746: # %cond.load857
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 216
+; CHECK-RV32-NEXT:    li a4, 215
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_747
+; CHECK-RV32-NEXT:    j .LBB61_230
+; CHECK-RV32-NEXT:  .LBB61_747: # %cond.load861
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 217
+; CHECK-RV32-NEXT:    li a4, 216
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_748
+; CHECK-RV32-NEXT:    j .LBB61_231
+; CHECK-RV32-NEXT:  .LBB61_748: # %cond.load865
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 218
+; CHECK-RV32-NEXT:    li a4, 217
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_749
+; CHECK-RV32-NEXT:    j .LBB61_232
+; CHECK-RV32-NEXT:  .LBB61_749: # %cond.load869
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 219
+; CHECK-RV32-NEXT:    li a4, 218
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_750
+; CHECK-RV32-NEXT:    j .LBB61_233
+; CHECK-RV32-NEXT:  .LBB61_750: # %cond.load873
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 220
+; CHECK-RV32-NEXT:    li a4, 219
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_751
+; CHECK-RV32-NEXT:    j .LBB61_234
+; CHECK-RV32-NEXT:  .LBB61_751: # %cond.load877
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 221
+; CHECK-RV32-NEXT:    li a4, 220
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1031
+; CHECK-RV32-NEXT:    j .LBB61_235
+; CHECK-RV32-NEXT:  .LBB61_1031: # %cond.load877
+; CHECK-RV32-NEXT:    j .LBB61_236
+; CHECK-RV32-NEXT:  .LBB61_752: # %cond.load889
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 224
+; CHECK-RV32-NEXT:    li a4, 223
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_753
+; CHECK-RV32-NEXT:    j .LBB61_240
+; CHECK-RV32-NEXT:  .LBB61_753: # %cond.load893
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 225
+; CHECK-RV32-NEXT:    li a4, 224
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_754
+; CHECK-RV32-NEXT:    j .LBB61_241
+; CHECK-RV32-NEXT:  .LBB61_754: # %cond.load897
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 226
+; CHECK-RV32-NEXT:    li a4, 225
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_755
+; CHECK-RV32-NEXT:    j .LBB61_242
+; CHECK-RV32-NEXT:  .LBB61_755: # %cond.load901
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 227
+; CHECK-RV32-NEXT:    li a4, 226
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_756
+; CHECK-RV32-NEXT:    j .LBB61_243
+; CHECK-RV32-NEXT:  .LBB61_756: # %cond.load905
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 228
+; CHECK-RV32-NEXT:    li a4, 227
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_757
+; CHECK-RV32-NEXT:    j .LBB61_244
+; CHECK-RV32-NEXT:  .LBB61_757: # %cond.load909
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 229
+; CHECK-RV32-NEXT:    li a4, 228
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_758
+; CHECK-RV32-NEXT:    j .LBB61_245
+; CHECK-RV32-NEXT:  .LBB61_758: # %cond.load913
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 230
+; CHECK-RV32-NEXT:    li a4, 229
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_759
+; CHECK-RV32-NEXT:    j .LBB61_246
+; CHECK-RV32-NEXT:  .LBB61_759: # %cond.load917
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 231
+; CHECK-RV32-NEXT:    li a4, 230
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_760
+; CHECK-RV32-NEXT:    j .LBB61_247
+; CHECK-RV32-NEXT:  .LBB61_760: # %cond.load921
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 232
+; CHECK-RV32-NEXT:    li a4, 231
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_761
+; CHECK-RV32-NEXT:    j .LBB61_248
+; CHECK-RV32-NEXT:  .LBB61_761: # %cond.load925
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 233
+; CHECK-RV32-NEXT:    li a4, 232
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_762
+; CHECK-RV32-NEXT:    j .LBB61_249
+; CHECK-RV32-NEXT:  .LBB61_762: # %cond.load929
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 234
+; CHECK-RV32-NEXT:    li a4, 233
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_763
+; CHECK-RV32-NEXT:    j .LBB61_250
+; CHECK-RV32-NEXT:  .LBB61_763: # %cond.load933
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 235
+; CHECK-RV32-NEXT:    li a4, 234
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_764
+; CHECK-RV32-NEXT:    j .LBB61_251
+; CHECK-RV32-NEXT:  .LBB61_764: # %cond.load937
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 236
+; CHECK-RV32-NEXT:    li a4, 235
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_765
+; CHECK-RV32-NEXT:    j .LBB61_252
+; CHECK-RV32-NEXT:  .LBB61_765: # %cond.load941
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 237
+; CHECK-RV32-NEXT:    li a4, 236
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_766
+; CHECK-RV32-NEXT:    j .LBB61_253
+; CHECK-RV32-NEXT:  .LBB61_766: # %cond.load945
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 238
+; CHECK-RV32-NEXT:    li a4, 237
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_767
+; CHECK-RV32-NEXT:    j .LBB61_254
+; CHECK-RV32-NEXT:  .LBB61_767: # %cond.load949
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 239
+; CHECK-RV32-NEXT:    li a4, 238
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_768
+; CHECK-RV32-NEXT:    j .LBB61_255
+; CHECK-RV32-NEXT:  .LBB61_768: # %cond.load953
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 240
+; CHECK-RV32-NEXT:    li a4, 239
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_769
+; CHECK-RV32-NEXT:    j .LBB61_256
+; CHECK-RV32-NEXT:  .LBB61_769: # %cond.load957
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 241
+; CHECK-RV32-NEXT:    li a4, 240
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_770
+; CHECK-RV32-NEXT:    j .LBB61_257
+; CHECK-RV32-NEXT:  .LBB61_770: # %cond.load961
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 242
+; CHECK-RV32-NEXT:    li a4, 241
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_771
+; CHECK-RV32-NEXT:    j .LBB61_258
+; CHECK-RV32-NEXT:  .LBB61_771: # %cond.load965
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 243
+; CHECK-RV32-NEXT:    li a4, 242
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_772
+; CHECK-RV32-NEXT:    j .LBB61_259
+; CHECK-RV32-NEXT:  .LBB61_772: # %cond.load969
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 244
+; CHECK-RV32-NEXT:    li a4, 243
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_773
+; CHECK-RV32-NEXT:    j .LBB61_260
+; CHECK-RV32-NEXT:  .LBB61_773: # %cond.load973
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 245
+; CHECK-RV32-NEXT:    li a4, 244
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_774
+; CHECK-RV32-NEXT:    j .LBB61_261
+; CHECK-RV32-NEXT:  .LBB61_774: # %cond.load977
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 246
+; CHECK-RV32-NEXT:    li a4, 245
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_775
+; CHECK-RV32-NEXT:    j .LBB61_262
+; CHECK-RV32-NEXT:  .LBB61_775: # %cond.load981
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 247
+; CHECK-RV32-NEXT:    li a4, 246
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_776
+; CHECK-RV32-NEXT:    j .LBB61_263
+; CHECK-RV32-NEXT:  .LBB61_776: # %cond.load985
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 248
+; CHECK-RV32-NEXT:    li a4, 247
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_777
+; CHECK-RV32-NEXT:    j .LBB61_264
+; CHECK-RV32-NEXT:  .LBB61_777: # %cond.load989
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 249
+; CHECK-RV32-NEXT:    li a4, 248
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_778
+; CHECK-RV32-NEXT:    j .LBB61_265
+; CHECK-RV32-NEXT:  .LBB61_778: # %cond.load993
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 250
+; CHECK-RV32-NEXT:    li a4, 249
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_779
+; CHECK-RV32-NEXT:    j .LBB61_266
+; CHECK-RV32-NEXT:  .LBB61_779: # %cond.load997
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 251
+; CHECK-RV32-NEXT:    li a4, 250
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_780
+; CHECK-RV32-NEXT:    j .LBB61_267
+; CHECK-RV32-NEXT:  .LBB61_780: # %cond.load1001
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 252
+; CHECK-RV32-NEXT:    li a4, 251
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_781
+; CHECK-RV32-NEXT:    j .LBB61_268
+; CHECK-RV32-NEXT:  .LBB61_781: # %cond.load1005
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a3, 253
+; CHECK-RV32-NEXT:    li a4, 252
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1032
+; CHECK-RV32-NEXT:    j .LBB61_269
+; CHECK-RV32-NEXT:  .LBB61_1032: # %cond.load1005
+; CHECK-RV32-NEXT:    j .LBB61_270
+; CHECK-RV32-NEXT:  .LBB61_782: # %cond.load1017
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v20, a2
+; CHECK-RV32-NEXT:    vmv8r.v v24, v8
+; CHECK-RV32-NEXT:    li a2, 256
+; CHECK-RV32-NEXT:    li a4, 255
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v20, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    vmv4r.v v24, v8
+; CHECK-RV32-NEXT:    vmv8r.v v8, v24
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_783
+; CHECK-RV32-NEXT:    j .LBB61_274
+; CHECK-RV32-NEXT:  .LBB61_783: # %cond.load1021
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 257
+; CHECK-RV32-NEXT:    li a4, 256
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_784
+; CHECK-RV32-NEXT:    j .LBB61_275
+; CHECK-RV32-NEXT:  .LBB61_784: # %cond.load1025
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 258
+; CHECK-RV32-NEXT:    li a4, 257
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_785
+; CHECK-RV32-NEXT:    j .LBB61_276
+; CHECK-RV32-NEXT:  .LBB61_785: # %cond.load1029
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 259
+; CHECK-RV32-NEXT:    li a4, 258
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_786
+; CHECK-RV32-NEXT:    j .LBB61_277
+; CHECK-RV32-NEXT:  .LBB61_786: # %cond.load1033
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 260
+; CHECK-RV32-NEXT:    li a4, 259
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_787
+; CHECK-RV32-NEXT:    j .LBB61_278
+; CHECK-RV32-NEXT:  .LBB61_787: # %cond.load1037
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 261
+; CHECK-RV32-NEXT:    li a4, 260
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_788
+; CHECK-RV32-NEXT:    j .LBB61_279
+; CHECK-RV32-NEXT:  .LBB61_788: # %cond.load1041
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 262
+; CHECK-RV32-NEXT:    li a4, 261
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_789
+; CHECK-RV32-NEXT:    j .LBB61_280
+; CHECK-RV32-NEXT:  .LBB61_789: # %cond.load1045
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 263
+; CHECK-RV32-NEXT:    li a4, 262
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_790
+; CHECK-RV32-NEXT:    j .LBB61_281
+; CHECK-RV32-NEXT:  .LBB61_790: # %cond.load1049
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 264
+; CHECK-RV32-NEXT:    li a4, 263
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_791
+; CHECK-RV32-NEXT:    j .LBB61_282
+; CHECK-RV32-NEXT:  .LBB61_791: # %cond.load1053
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 265
+; CHECK-RV32-NEXT:    li a4, 264
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_792
+; CHECK-RV32-NEXT:    j .LBB61_283
+; CHECK-RV32-NEXT:  .LBB61_792: # %cond.load1057
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 266
+; CHECK-RV32-NEXT:    li a4, 265
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_793
+; CHECK-RV32-NEXT:    j .LBB61_284
+; CHECK-RV32-NEXT:  .LBB61_793: # %cond.load1061
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 267
+; CHECK-RV32-NEXT:    li a4, 266
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_794
+; CHECK-RV32-NEXT:    j .LBB61_285
+; CHECK-RV32-NEXT:  .LBB61_794: # %cond.load1065
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 268
+; CHECK-RV32-NEXT:    li a4, 267
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_795
+; CHECK-RV32-NEXT:    j .LBB61_286
+; CHECK-RV32-NEXT:  .LBB61_795: # %cond.load1069
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 269
+; CHECK-RV32-NEXT:    li a4, 268
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_796
+; CHECK-RV32-NEXT:    j .LBB61_287
+; CHECK-RV32-NEXT:  .LBB61_796: # %cond.load1073
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 270
+; CHECK-RV32-NEXT:    li a4, 269
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_797
+; CHECK-RV32-NEXT:    j .LBB61_288
+; CHECK-RV32-NEXT:  .LBB61_797: # %cond.load1077
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 271
+; CHECK-RV32-NEXT:    li a4, 270
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_798
+; CHECK-RV32-NEXT:    j .LBB61_289
+; CHECK-RV32-NEXT:  .LBB61_798: # %cond.load1081
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 272
+; CHECK-RV32-NEXT:    li a4, 271
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_799
+; CHECK-RV32-NEXT:    j .LBB61_290
+; CHECK-RV32-NEXT:  .LBB61_799: # %cond.load1085
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 273
+; CHECK-RV32-NEXT:    li a4, 272
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_800
+; CHECK-RV32-NEXT:    j .LBB61_291
+; CHECK-RV32-NEXT:  .LBB61_800: # %cond.load1089
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 274
+; CHECK-RV32-NEXT:    li a4, 273
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_801
+; CHECK-RV32-NEXT:    j .LBB61_292
+; CHECK-RV32-NEXT:  .LBB61_801: # %cond.load1093
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 275
+; CHECK-RV32-NEXT:    li a4, 274
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_802
+; CHECK-RV32-NEXT:    j .LBB61_293
+; CHECK-RV32-NEXT:  .LBB61_802: # %cond.load1097
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 276
+; CHECK-RV32-NEXT:    li a4, 275
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_803
+; CHECK-RV32-NEXT:    j .LBB61_294
+; CHECK-RV32-NEXT:  .LBB61_803: # %cond.load1101
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 277
+; CHECK-RV32-NEXT:    li a4, 276
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_804
+; CHECK-RV32-NEXT:    j .LBB61_295
+; CHECK-RV32-NEXT:  .LBB61_804: # %cond.load1105
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 278
+; CHECK-RV32-NEXT:    li a4, 277
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_805
+; CHECK-RV32-NEXT:    j .LBB61_296
+; CHECK-RV32-NEXT:  .LBB61_805: # %cond.load1109
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 279
+; CHECK-RV32-NEXT:    li a4, 278
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_806
+; CHECK-RV32-NEXT:    j .LBB61_297
+; CHECK-RV32-NEXT:  .LBB61_806: # %cond.load1113
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 280
+; CHECK-RV32-NEXT:    li a4, 279
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_807
+; CHECK-RV32-NEXT:    j .LBB61_298
+; CHECK-RV32-NEXT:  .LBB61_807: # %cond.load1117
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 281
+; CHECK-RV32-NEXT:    li a4, 280
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_808
+; CHECK-RV32-NEXT:    j .LBB61_299
+; CHECK-RV32-NEXT:  .LBB61_808: # %cond.load1121
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 282
+; CHECK-RV32-NEXT:    li a4, 281
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_809
+; CHECK-RV32-NEXT:    j .LBB61_300
+; CHECK-RV32-NEXT:  .LBB61_809: # %cond.load1125
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 283
+; CHECK-RV32-NEXT:    li a4, 282
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_810
+; CHECK-RV32-NEXT:    j .LBB61_301
+; CHECK-RV32-NEXT:  .LBB61_810: # %cond.load1129
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 284
+; CHECK-RV32-NEXT:    li a4, 283
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_811
+; CHECK-RV32-NEXT:    j .LBB61_302
+; CHECK-RV32-NEXT:  .LBB61_811: # %cond.load1133
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 285
+; CHECK-RV32-NEXT:    li a4, 284
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1033
+; CHECK-RV32-NEXT:    j .LBB61_303
+; CHECK-RV32-NEXT:  .LBB61_1033: # %cond.load1133
+; CHECK-RV32-NEXT:    j .LBB61_304
+; CHECK-RV32-NEXT:  .LBB61_812: # %cond.load1145
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 288
+; CHECK-RV32-NEXT:    li a4, 287
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_813
+; CHECK-RV32-NEXT:    j .LBB61_308
+; CHECK-RV32-NEXT:  .LBB61_813: # %cond.load1149
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 289
+; CHECK-RV32-NEXT:    li a4, 288
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_814
+; CHECK-RV32-NEXT:    j .LBB61_309
+; CHECK-RV32-NEXT:  .LBB61_814: # %cond.load1153
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 290
+; CHECK-RV32-NEXT:    li a4, 289
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_815
+; CHECK-RV32-NEXT:    j .LBB61_310
+; CHECK-RV32-NEXT:  .LBB61_815: # %cond.load1157
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 291
+; CHECK-RV32-NEXT:    li a4, 290
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_816
+; CHECK-RV32-NEXT:    j .LBB61_311
+; CHECK-RV32-NEXT:  .LBB61_816: # %cond.load1161
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 292
+; CHECK-RV32-NEXT:    li a4, 291
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_817
+; CHECK-RV32-NEXT:    j .LBB61_312
+; CHECK-RV32-NEXT:  .LBB61_817: # %cond.load1165
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 293
+; CHECK-RV32-NEXT:    li a4, 292
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_818
+; CHECK-RV32-NEXT:    j .LBB61_313
+; CHECK-RV32-NEXT:  .LBB61_818: # %cond.load1169
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 294
+; CHECK-RV32-NEXT:    li a4, 293
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_819
+; CHECK-RV32-NEXT:    j .LBB61_314
+; CHECK-RV32-NEXT:  .LBB61_819: # %cond.load1173
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 295
+; CHECK-RV32-NEXT:    li a4, 294
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_820
+; CHECK-RV32-NEXT:    j .LBB61_315
+; CHECK-RV32-NEXT:  .LBB61_820: # %cond.load1177
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 296
+; CHECK-RV32-NEXT:    li a4, 295
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_821
+; CHECK-RV32-NEXT:    j .LBB61_316
+; CHECK-RV32-NEXT:  .LBB61_821: # %cond.load1181
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 297
+; CHECK-RV32-NEXT:    li a4, 296
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_822
+; CHECK-RV32-NEXT:    j .LBB61_317
+; CHECK-RV32-NEXT:  .LBB61_822: # %cond.load1185
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 298
+; CHECK-RV32-NEXT:    li a4, 297
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_823
+; CHECK-RV32-NEXT:    j .LBB61_318
+; CHECK-RV32-NEXT:  .LBB61_823: # %cond.load1189
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 299
+; CHECK-RV32-NEXT:    li a4, 298
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_824
+; CHECK-RV32-NEXT:    j .LBB61_319
+; CHECK-RV32-NEXT:  .LBB61_824: # %cond.load1193
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 300
+; CHECK-RV32-NEXT:    li a4, 299
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_825
+; CHECK-RV32-NEXT:    j .LBB61_320
+; CHECK-RV32-NEXT:  .LBB61_825: # %cond.load1197
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 301
+; CHECK-RV32-NEXT:    li a4, 300
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_826
+; CHECK-RV32-NEXT:    j .LBB61_321
+; CHECK-RV32-NEXT:  .LBB61_826: # %cond.load1201
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 302
+; CHECK-RV32-NEXT:    li a4, 301
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_827
+; CHECK-RV32-NEXT:    j .LBB61_322
+; CHECK-RV32-NEXT:  .LBB61_827: # %cond.load1205
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 303
+; CHECK-RV32-NEXT:    li a4, 302
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_828
+; CHECK-RV32-NEXT:    j .LBB61_323
+; CHECK-RV32-NEXT:  .LBB61_828: # %cond.load1209
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 304
+; CHECK-RV32-NEXT:    li a4, 303
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_829
+; CHECK-RV32-NEXT:    j .LBB61_324
+; CHECK-RV32-NEXT:  .LBB61_829: # %cond.load1213
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 305
+; CHECK-RV32-NEXT:    li a4, 304
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_830
+; CHECK-RV32-NEXT:    j .LBB61_325
+; CHECK-RV32-NEXT:  .LBB61_830: # %cond.load1217
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 306
+; CHECK-RV32-NEXT:    li a4, 305
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_831
+; CHECK-RV32-NEXT:    j .LBB61_326
+; CHECK-RV32-NEXT:  .LBB61_831: # %cond.load1221
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 307
+; CHECK-RV32-NEXT:    li a4, 306
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_832
+; CHECK-RV32-NEXT:    j .LBB61_327
+; CHECK-RV32-NEXT:  .LBB61_832: # %cond.load1225
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 308
+; CHECK-RV32-NEXT:    li a4, 307
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_833
+; CHECK-RV32-NEXT:    j .LBB61_328
+; CHECK-RV32-NEXT:  .LBB61_833: # %cond.load1229
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 309
+; CHECK-RV32-NEXT:    li a4, 308
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_834
+; CHECK-RV32-NEXT:    j .LBB61_329
+; CHECK-RV32-NEXT:  .LBB61_834: # %cond.load1233
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 310
+; CHECK-RV32-NEXT:    li a4, 309
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_835
+; CHECK-RV32-NEXT:    j .LBB61_330
+; CHECK-RV32-NEXT:  .LBB61_835: # %cond.load1237
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 311
+; CHECK-RV32-NEXT:    li a4, 310
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_836
+; CHECK-RV32-NEXT:    j .LBB61_331
+; CHECK-RV32-NEXT:  .LBB61_836: # %cond.load1241
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 312
+; CHECK-RV32-NEXT:    li a4, 311
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_837
+; CHECK-RV32-NEXT:    j .LBB61_332
+; CHECK-RV32-NEXT:  .LBB61_837: # %cond.load1245
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 313
+; CHECK-RV32-NEXT:    li a4, 312
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_838
+; CHECK-RV32-NEXT:    j .LBB61_333
+; CHECK-RV32-NEXT:  .LBB61_838: # %cond.load1249
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 314
+; CHECK-RV32-NEXT:    li a4, 313
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_839
+; CHECK-RV32-NEXT:    j .LBB61_334
+; CHECK-RV32-NEXT:  .LBB61_839: # %cond.load1253
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 315
+; CHECK-RV32-NEXT:    li a4, 314
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_840
+; CHECK-RV32-NEXT:    j .LBB61_335
+; CHECK-RV32-NEXT:  .LBB61_840: # %cond.load1257
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 316
+; CHECK-RV32-NEXT:    li a4, 315
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_841
+; CHECK-RV32-NEXT:    j .LBB61_336
+; CHECK-RV32-NEXT:  .LBB61_841: # %cond.load1261
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 317
+; CHECK-RV32-NEXT:    li a4, 316
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1034
+; CHECK-RV32-NEXT:    j .LBB61_337
+; CHECK-RV32-NEXT:  .LBB61_1034: # %cond.load1261
+; CHECK-RV32-NEXT:    j .LBB61_338
+; CHECK-RV32-NEXT:  .LBB61_842: # %cond.load1273
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 320
+; CHECK-RV32-NEXT:    li a4, 319
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_843
+; CHECK-RV32-NEXT:    j .LBB61_342
+; CHECK-RV32-NEXT:  .LBB61_843: # %cond.load1277
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 321
+; CHECK-RV32-NEXT:    li a4, 320
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_844
+; CHECK-RV32-NEXT:    j .LBB61_343
+; CHECK-RV32-NEXT:  .LBB61_844: # %cond.load1281
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 322
+; CHECK-RV32-NEXT:    li a4, 321
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_845
+; CHECK-RV32-NEXT:    j .LBB61_344
+; CHECK-RV32-NEXT:  .LBB61_845: # %cond.load1285
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 323
+; CHECK-RV32-NEXT:    li a4, 322
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_846
+; CHECK-RV32-NEXT:    j .LBB61_345
+; CHECK-RV32-NEXT:  .LBB61_846: # %cond.load1289
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 324
+; CHECK-RV32-NEXT:    li a4, 323
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_847
+; CHECK-RV32-NEXT:    j .LBB61_346
+; CHECK-RV32-NEXT:  .LBB61_847: # %cond.load1293
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 325
+; CHECK-RV32-NEXT:    li a4, 324
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_848
+; CHECK-RV32-NEXT:    j .LBB61_347
+; CHECK-RV32-NEXT:  .LBB61_848: # %cond.load1297
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 326
+; CHECK-RV32-NEXT:    li a4, 325
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_849
+; CHECK-RV32-NEXT:    j .LBB61_348
+; CHECK-RV32-NEXT:  .LBB61_849: # %cond.load1301
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 327
+; CHECK-RV32-NEXT:    li a4, 326
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_850
+; CHECK-RV32-NEXT:    j .LBB61_349
+; CHECK-RV32-NEXT:  .LBB61_850: # %cond.load1305
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 328
+; CHECK-RV32-NEXT:    li a4, 327
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_851
+; CHECK-RV32-NEXT:    j .LBB61_350
+; CHECK-RV32-NEXT:  .LBB61_851: # %cond.load1309
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 329
+; CHECK-RV32-NEXT:    li a4, 328
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_852
+; CHECK-RV32-NEXT:    j .LBB61_351
+; CHECK-RV32-NEXT:  .LBB61_852: # %cond.load1313
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 330
+; CHECK-RV32-NEXT:    li a4, 329
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_853
+; CHECK-RV32-NEXT:    j .LBB61_352
+; CHECK-RV32-NEXT:  .LBB61_853: # %cond.load1317
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 331
+; CHECK-RV32-NEXT:    li a4, 330
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_854
+; CHECK-RV32-NEXT:    j .LBB61_353
+; CHECK-RV32-NEXT:  .LBB61_854: # %cond.load1321
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 332
+; CHECK-RV32-NEXT:    li a4, 331
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_855
+; CHECK-RV32-NEXT:    j .LBB61_354
+; CHECK-RV32-NEXT:  .LBB61_855: # %cond.load1325
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 333
+; CHECK-RV32-NEXT:    li a4, 332
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_856
+; CHECK-RV32-NEXT:    j .LBB61_355
+; CHECK-RV32-NEXT:  .LBB61_856: # %cond.load1329
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 334
+; CHECK-RV32-NEXT:    li a4, 333
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_857
+; CHECK-RV32-NEXT:    j .LBB61_356
+; CHECK-RV32-NEXT:  .LBB61_857: # %cond.load1333
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 335
+; CHECK-RV32-NEXT:    li a4, 334
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_858
+; CHECK-RV32-NEXT:    j .LBB61_357
+; CHECK-RV32-NEXT:  .LBB61_858: # %cond.load1337
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 336
+; CHECK-RV32-NEXT:    li a4, 335
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_859
+; CHECK-RV32-NEXT:    j .LBB61_358
+; CHECK-RV32-NEXT:  .LBB61_859: # %cond.load1341
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 337
+; CHECK-RV32-NEXT:    li a4, 336
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_860
+; CHECK-RV32-NEXT:    j .LBB61_359
+; CHECK-RV32-NEXT:  .LBB61_860: # %cond.load1345
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 338
+; CHECK-RV32-NEXT:    li a4, 337
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_861
+; CHECK-RV32-NEXT:    j .LBB61_360
+; CHECK-RV32-NEXT:  .LBB61_861: # %cond.load1349
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 339
+; CHECK-RV32-NEXT:    li a4, 338
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_862
+; CHECK-RV32-NEXT:    j .LBB61_361
+; CHECK-RV32-NEXT:  .LBB61_862: # %cond.load1353
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 340
+; CHECK-RV32-NEXT:    li a4, 339
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_863
+; CHECK-RV32-NEXT:    j .LBB61_362
+; CHECK-RV32-NEXT:  .LBB61_863: # %cond.load1357
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 341
+; CHECK-RV32-NEXT:    li a4, 340
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_864
+; CHECK-RV32-NEXT:    j .LBB61_363
+; CHECK-RV32-NEXT:  .LBB61_864: # %cond.load1361
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 342
+; CHECK-RV32-NEXT:    li a4, 341
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_865
+; CHECK-RV32-NEXT:    j .LBB61_364
+; CHECK-RV32-NEXT:  .LBB61_865: # %cond.load1365
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 343
+; CHECK-RV32-NEXT:    li a4, 342
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_866
+; CHECK-RV32-NEXT:    j .LBB61_365
+; CHECK-RV32-NEXT:  .LBB61_866: # %cond.load1369
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 344
+; CHECK-RV32-NEXT:    li a4, 343
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_867
+; CHECK-RV32-NEXT:    j .LBB61_366
+; CHECK-RV32-NEXT:  .LBB61_867: # %cond.load1373
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 345
+; CHECK-RV32-NEXT:    li a4, 344
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_868
+; CHECK-RV32-NEXT:    j .LBB61_367
+; CHECK-RV32-NEXT:  .LBB61_868: # %cond.load1377
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 346
+; CHECK-RV32-NEXT:    li a4, 345
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_869
+; CHECK-RV32-NEXT:    j .LBB61_368
+; CHECK-RV32-NEXT:  .LBB61_869: # %cond.load1381
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 347
+; CHECK-RV32-NEXT:    li a4, 346
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_870
+; CHECK-RV32-NEXT:    j .LBB61_369
+; CHECK-RV32-NEXT:  .LBB61_870: # %cond.load1385
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 348
+; CHECK-RV32-NEXT:    li a4, 347
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_871
+; CHECK-RV32-NEXT:    j .LBB61_370
+; CHECK-RV32-NEXT:  .LBB61_871: # %cond.load1389
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 349
+; CHECK-RV32-NEXT:    li a4, 348
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1035
+; CHECK-RV32-NEXT:    j .LBB61_371
+; CHECK-RV32-NEXT:  .LBB61_1035: # %cond.load1389
+; CHECK-RV32-NEXT:    j .LBB61_372
+; CHECK-RV32-NEXT:  .LBB61_872: # %cond.load1401
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 352
+; CHECK-RV32-NEXT:    li a4, 351
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_873
+; CHECK-RV32-NEXT:    j .LBB61_376
+; CHECK-RV32-NEXT:  .LBB61_873: # %cond.load1405
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 353
+; CHECK-RV32-NEXT:    li a4, 352
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_874
+; CHECK-RV32-NEXT:    j .LBB61_377
+; CHECK-RV32-NEXT:  .LBB61_874: # %cond.load1409
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 354
+; CHECK-RV32-NEXT:    li a4, 353
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_875
+; CHECK-RV32-NEXT:    j .LBB61_378
+; CHECK-RV32-NEXT:  .LBB61_875: # %cond.load1413
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 355
+; CHECK-RV32-NEXT:    li a4, 354
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_876
+; CHECK-RV32-NEXT:    j .LBB61_379
+; CHECK-RV32-NEXT:  .LBB61_876: # %cond.load1417
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 356
+; CHECK-RV32-NEXT:    li a4, 355
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_877
+; CHECK-RV32-NEXT:    j .LBB61_380
+; CHECK-RV32-NEXT:  .LBB61_877: # %cond.load1421
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 357
+; CHECK-RV32-NEXT:    li a4, 356
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_878
+; CHECK-RV32-NEXT:    j .LBB61_381
+; CHECK-RV32-NEXT:  .LBB61_878: # %cond.load1425
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 358
+; CHECK-RV32-NEXT:    li a4, 357
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_879
+; CHECK-RV32-NEXT:    j .LBB61_382
+; CHECK-RV32-NEXT:  .LBB61_879: # %cond.load1429
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 359
+; CHECK-RV32-NEXT:    li a4, 358
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_880
+; CHECK-RV32-NEXT:    j .LBB61_383
+; CHECK-RV32-NEXT:  .LBB61_880: # %cond.load1433
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 360
+; CHECK-RV32-NEXT:    li a4, 359
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_881
+; CHECK-RV32-NEXT:    j .LBB61_384
+; CHECK-RV32-NEXT:  .LBB61_881: # %cond.load1437
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 361
+; CHECK-RV32-NEXT:    li a4, 360
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_882
+; CHECK-RV32-NEXT:    j .LBB61_385
+; CHECK-RV32-NEXT:  .LBB61_882: # %cond.load1441
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 362
+; CHECK-RV32-NEXT:    li a4, 361
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_883
+; CHECK-RV32-NEXT:    j .LBB61_386
+; CHECK-RV32-NEXT:  .LBB61_883: # %cond.load1445
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 363
+; CHECK-RV32-NEXT:    li a4, 362
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_884
+; CHECK-RV32-NEXT:    j .LBB61_387
+; CHECK-RV32-NEXT:  .LBB61_884: # %cond.load1449
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 364
+; CHECK-RV32-NEXT:    li a4, 363
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_885
+; CHECK-RV32-NEXT:    j .LBB61_388
+; CHECK-RV32-NEXT:  .LBB61_885: # %cond.load1453
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 365
+; CHECK-RV32-NEXT:    li a4, 364
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_886
+; CHECK-RV32-NEXT:    j .LBB61_389
+; CHECK-RV32-NEXT:  .LBB61_886: # %cond.load1457
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 366
+; CHECK-RV32-NEXT:    li a4, 365
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_887
+; CHECK-RV32-NEXT:    j .LBB61_390
+; CHECK-RV32-NEXT:  .LBB61_887: # %cond.load1461
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 367
+; CHECK-RV32-NEXT:    li a4, 366
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_888
+; CHECK-RV32-NEXT:    j .LBB61_391
+; CHECK-RV32-NEXT:  .LBB61_888: # %cond.load1465
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 368
+; CHECK-RV32-NEXT:    li a4, 367
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_889
+; CHECK-RV32-NEXT:    j .LBB61_392
+; CHECK-RV32-NEXT:  .LBB61_889: # %cond.load1469
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 369
+; CHECK-RV32-NEXT:    li a4, 368
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_890
+; CHECK-RV32-NEXT:    j .LBB61_393
+; CHECK-RV32-NEXT:  .LBB61_890: # %cond.load1473
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 370
+; CHECK-RV32-NEXT:    li a4, 369
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_891
+; CHECK-RV32-NEXT:    j .LBB61_394
+; CHECK-RV32-NEXT:  .LBB61_891: # %cond.load1477
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 371
+; CHECK-RV32-NEXT:    li a4, 370
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_892
+; CHECK-RV32-NEXT:    j .LBB61_395
+; CHECK-RV32-NEXT:  .LBB61_892: # %cond.load1481
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 372
+; CHECK-RV32-NEXT:    li a4, 371
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_893
+; CHECK-RV32-NEXT:    j .LBB61_396
+; CHECK-RV32-NEXT:  .LBB61_893: # %cond.load1485
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 373
+; CHECK-RV32-NEXT:    li a4, 372
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_894
+; CHECK-RV32-NEXT:    j .LBB61_397
+; CHECK-RV32-NEXT:  .LBB61_894: # %cond.load1489
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 374
+; CHECK-RV32-NEXT:    li a4, 373
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_895
+; CHECK-RV32-NEXT:    j .LBB61_398
+; CHECK-RV32-NEXT:  .LBB61_895: # %cond.load1493
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 375
+; CHECK-RV32-NEXT:    li a4, 374
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_896
+; CHECK-RV32-NEXT:    j .LBB61_399
+; CHECK-RV32-NEXT:  .LBB61_896: # %cond.load1497
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 376
+; CHECK-RV32-NEXT:    li a4, 375
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_897
+; CHECK-RV32-NEXT:    j .LBB61_400
+; CHECK-RV32-NEXT:  .LBB61_897: # %cond.load1501
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 377
+; CHECK-RV32-NEXT:    li a4, 376
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_898
+; CHECK-RV32-NEXT:    j .LBB61_401
+; CHECK-RV32-NEXT:  .LBB61_898: # %cond.load1505
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 378
+; CHECK-RV32-NEXT:    li a4, 377
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_899
+; CHECK-RV32-NEXT:    j .LBB61_402
+; CHECK-RV32-NEXT:  .LBB61_899: # %cond.load1509
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 379
+; CHECK-RV32-NEXT:    li a4, 378
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_900
+; CHECK-RV32-NEXT:    j .LBB61_403
+; CHECK-RV32-NEXT:  .LBB61_900: # %cond.load1513
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 380
+; CHECK-RV32-NEXT:    li a4, 379
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_901
+; CHECK-RV32-NEXT:    j .LBB61_404
+; CHECK-RV32-NEXT:  .LBB61_901: # %cond.load1517
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 381
+; CHECK-RV32-NEXT:    li a4, 380
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1036
+; CHECK-RV32-NEXT:    j .LBB61_405
+; CHECK-RV32-NEXT:  .LBB61_1036: # %cond.load1517
+; CHECK-RV32-NEXT:    j .LBB61_406
+; CHECK-RV32-NEXT:  .LBB61_902: # %cond.load1529
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 384
+; CHECK-RV32-NEXT:    li a4, 383
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_903
+; CHECK-RV32-NEXT:    j .LBB61_410
+; CHECK-RV32-NEXT:  .LBB61_903: # %cond.load1533
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 385
+; CHECK-RV32-NEXT:    li a4, 384
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_904
+; CHECK-RV32-NEXT:    j .LBB61_411
+; CHECK-RV32-NEXT:  .LBB61_904: # %cond.load1537
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 386
+; CHECK-RV32-NEXT:    li a4, 385
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_905
+; CHECK-RV32-NEXT:    j .LBB61_412
+; CHECK-RV32-NEXT:  .LBB61_905: # %cond.load1541
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 387
+; CHECK-RV32-NEXT:    li a4, 386
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_906
+; CHECK-RV32-NEXT:    j .LBB61_413
+; CHECK-RV32-NEXT:  .LBB61_906: # %cond.load1545
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 388
+; CHECK-RV32-NEXT:    li a4, 387
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_907
+; CHECK-RV32-NEXT:    j .LBB61_414
+; CHECK-RV32-NEXT:  .LBB61_907: # %cond.load1549
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 389
+; CHECK-RV32-NEXT:    li a4, 388
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_908
+; CHECK-RV32-NEXT:    j .LBB61_415
+; CHECK-RV32-NEXT:  .LBB61_908: # %cond.load1553
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 390
+; CHECK-RV32-NEXT:    li a4, 389
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_909
+; CHECK-RV32-NEXT:    j .LBB61_416
+; CHECK-RV32-NEXT:  .LBB61_909: # %cond.load1557
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 391
+; CHECK-RV32-NEXT:    li a4, 390
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_910
+; CHECK-RV32-NEXT:    j .LBB61_417
+; CHECK-RV32-NEXT:  .LBB61_910: # %cond.load1561
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 392
+; CHECK-RV32-NEXT:    li a4, 391
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_911
+; CHECK-RV32-NEXT:    j .LBB61_418
+; CHECK-RV32-NEXT:  .LBB61_911: # %cond.load1565
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 393
+; CHECK-RV32-NEXT:    li a4, 392
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_912
+; CHECK-RV32-NEXT:    j .LBB61_419
+; CHECK-RV32-NEXT:  .LBB61_912: # %cond.load1569
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 394
+; CHECK-RV32-NEXT:    li a4, 393
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_913
+; CHECK-RV32-NEXT:    j .LBB61_420
+; CHECK-RV32-NEXT:  .LBB61_913: # %cond.load1573
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 395
+; CHECK-RV32-NEXT:    li a4, 394
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_914
+; CHECK-RV32-NEXT:    j .LBB61_421
+; CHECK-RV32-NEXT:  .LBB61_914: # %cond.load1577
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 396
+; CHECK-RV32-NEXT:    li a4, 395
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_915
+; CHECK-RV32-NEXT:    j .LBB61_422
+; CHECK-RV32-NEXT:  .LBB61_915: # %cond.load1581
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 397
+; CHECK-RV32-NEXT:    li a4, 396
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_916
+; CHECK-RV32-NEXT:    j .LBB61_423
+; CHECK-RV32-NEXT:  .LBB61_916: # %cond.load1585
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 398
+; CHECK-RV32-NEXT:    li a4, 397
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_917
+; CHECK-RV32-NEXT:    j .LBB61_424
+; CHECK-RV32-NEXT:  .LBB61_917: # %cond.load1589
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 399
+; CHECK-RV32-NEXT:    li a4, 398
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_918
+; CHECK-RV32-NEXT:    j .LBB61_425
+; CHECK-RV32-NEXT:  .LBB61_918: # %cond.load1593
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 400
+; CHECK-RV32-NEXT:    li a4, 399
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_919
+; CHECK-RV32-NEXT:    j .LBB61_426
+; CHECK-RV32-NEXT:  .LBB61_919: # %cond.load1597
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 401
+; CHECK-RV32-NEXT:    li a4, 400
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_920
+; CHECK-RV32-NEXT:    j .LBB61_427
+; CHECK-RV32-NEXT:  .LBB61_920: # %cond.load1601
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 402
+; CHECK-RV32-NEXT:    li a4, 401
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_921
+; CHECK-RV32-NEXT:    j .LBB61_428
+; CHECK-RV32-NEXT:  .LBB61_921: # %cond.load1605
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 403
+; CHECK-RV32-NEXT:    li a4, 402
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_922
+; CHECK-RV32-NEXT:    j .LBB61_429
+; CHECK-RV32-NEXT:  .LBB61_922: # %cond.load1609
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 404
+; CHECK-RV32-NEXT:    li a4, 403
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_923
+; CHECK-RV32-NEXT:    j .LBB61_430
+; CHECK-RV32-NEXT:  .LBB61_923: # %cond.load1613
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 405
+; CHECK-RV32-NEXT:    li a4, 404
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_924
+; CHECK-RV32-NEXT:    j .LBB61_431
+; CHECK-RV32-NEXT:  .LBB61_924: # %cond.load1617
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 406
+; CHECK-RV32-NEXT:    li a4, 405
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_925
+; CHECK-RV32-NEXT:    j .LBB61_432
+; CHECK-RV32-NEXT:  .LBB61_925: # %cond.load1621
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 407
+; CHECK-RV32-NEXT:    li a4, 406
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_926
+; CHECK-RV32-NEXT:    j .LBB61_433
+; CHECK-RV32-NEXT:  .LBB61_926: # %cond.load1625
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 408
+; CHECK-RV32-NEXT:    li a4, 407
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_927
+; CHECK-RV32-NEXT:    j .LBB61_434
+; CHECK-RV32-NEXT:  .LBB61_927: # %cond.load1629
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 409
+; CHECK-RV32-NEXT:    li a4, 408
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_928
+; CHECK-RV32-NEXT:    j .LBB61_435
+; CHECK-RV32-NEXT:  .LBB61_928: # %cond.load1633
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 410
+; CHECK-RV32-NEXT:    li a4, 409
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_929
+; CHECK-RV32-NEXT:    j .LBB61_436
+; CHECK-RV32-NEXT:  .LBB61_929: # %cond.load1637
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 411
+; CHECK-RV32-NEXT:    li a4, 410
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_930
+; CHECK-RV32-NEXT:    j .LBB61_437
+; CHECK-RV32-NEXT:  .LBB61_930: # %cond.load1641
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 412
+; CHECK-RV32-NEXT:    li a4, 411
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_931
+; CHECK-RV32-NEXT:    j .LBB61_438
+; CHECK-RV32-NEXT:  .LBB61_931: # %cond.load1645
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 413
+; CHECK-RV32-NEXT:    li a4, 412
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1037
+; CHECK-RV32-NEXT:    j .LBB61_439
+; CHECK-RV32-NEXT:  .LBB61_1037: # %cond.load1645
+; CHECK-RV32-NEXT:    j .LBB61_440
+; CHECK-RV32-NEXT:  .LBB61_932: # %cond.load1657
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 416
+; CHECK-RV32-NEXT:    li a4, 415
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_933
+; CHECK-RV32-NEXT:    j .LBB61_444
+; CHECK-RV32-NEXT:  .LBB61_933: # %cond.load1661
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 417
+; CHECK-RV32-NEXT:    li a4, 416
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 2
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_934
+; CHECK-RV32-NEXT:    j .LBB61_445
+; CHECK-RV32-NEXT:  .LBB61_934: # %cond.load1665
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 418
+; CHECK-RV32-NEXT:    li a4, 417
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 4
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_935
+; CHECK-RV32-NEXT:    j .LBB61_446
+; CHECK-RV32-NEXT:  .LBB61_935: # %cond.load1669
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 419
+; CHECK-RV32-NEXT:    li a4, 418
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 8
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_936
+; CHECK-RV32-NEXT:    j .LBB61_447
+; CHECK-RV32-NEXT:  .LBB61_936: # %cond.load1673
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 420
+; CHECK-RV32-NEXT:    li a4, 419
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 16
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_937
+; CHECK-RV32-NEXT:    j .LBB61_448
+; CHECK-RV32-NEXT:  .LBB61_937: # %cond.load1677
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 421
+; CHECK-RV32-NEXT:    li a4, 420
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 32
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_938
+; CHECK-RV32-NEXT:    j .LBB61_449
+; CHECK-RV32-NEXT:  .LBB61_938: # %cond.load1681
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 422
+; CHECK-RV32-NEXT:    li a4, 421
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 64
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_939
+; CHECK-RV32-NEXT:    j .LBB61_450
+; CHECK-RV32-NEXT:  .LBB61_939: # %cond.load1685
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 423
+; CHECK-RV32-NEXT:    li a4, 422
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 128
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_940
+; CHECK-RV32-NEXT:    j .LBB61_451
+; CHECK-RV32-NEXT:  .LBB61_940: # %cond.load1689
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 424
+; CHECK-RV32-NEXT:    li a4, 423
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 256
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_941
+; CHECK-RV32-NEXT:    j .LBB61_452
+; CHECK-RV32-NEXT:  .LBB61_941: # %cond.load1693
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 425
+; CHECK-RV32-NEXT:    li a4, 424
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 512
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_942
+; CHECK-RV32-NEXT:    j .LBB61_453
+; CHECK-RV32-NEXT:  .LBB61_942: # %cond.load1697
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 426
+; CHECK-RV32-NEXT:    li a4, 425
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a3, a2, 1024
+; CHECK-RV32-NEXT:    bnez a3, .LBB61_943
+; CHECK-RV32-NEXT:    j .LBB61_454
+; CHECK-RV32-NEXT:  .LBB61_943: # %cond.load1701
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 427
+; CHECK-RV32-NEXT:    li a4, 426
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 20
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_944
+; CHECK-RV32-NEXT:    j .LBB61_455
+; CHECK-RV32-NEXT:  .LBB61_944: # %cond.load1705
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 428
+; CHECK-RV32-NEXT:    li a4, 427
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 19
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_945
+; CHECK-RV32-NEXT:    j .LBB61_456
+; CHECK-RV32-NEXT:  .LBB61_945: # %cond.load1709
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 429
+; CHECK-RV32-NEXT:    li a4, 428
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 18
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_946
+; CHECK-RV32-NEXT:    j .LBB61_457
+; CHECK-RV32-NEXT:  .LBB61_946: # %cond.load1713
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 430
+; CHECK-RV32-NEXT:    li a4, 429
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 17
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_947
+; CHECK-RV32-NEXT:    j .LBB61_458
+; CHECK-RV32-NEXT:  .LBB61_947: # %cond.load1717
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 431
+; CHECK-RV32-NEXT:    li a4, 430
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 16
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_948
+; CHECK-RV32-NEXT:    j .LBB61_459
+; CHECK-RV32-NEXT:  .LBB61_948: # %cond.load1721
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 432
+; CHECK-RV32-NEXT:    li a4, 431
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 15
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_949
+; CHECK-RV32-NEXT:    j .LBB61_460
+; CHECK-RV32-NEXT:  .LBB61_949: # %cond.load1725
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 433
+; CHECK-RV32-NEXT:    li a4, 432
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 14
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_950
+; CHECK-RV32-NEXT:    j .LBB61_461
+; CHECK-RV32-NEXT:  .LBB61_950: # %cond.load1729
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 434
+; CHECK-RV32-NEXT:    li a4, 433
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 13
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_951
+; CHECK-RV32-NEXT:    j .LBB61_462
+; CHECK-RV32-NEXT:  .LBB61_951: # %cond.load1733
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 435
+; CHECK-RV32-NEXT:    li a4, 434
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 12
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_952
+; CHECK-RV32-NEXT:    j .LBB61_463
+; CHECK-RV32-NEXT:  .LBB61_952: # %cond.load1737
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 436
+; CHECK-RV32-NEXT:    li a4, 435
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 11
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_953
+; CHECK-RV32-NEXT:    j .LBB61_464
+; CHECK-RV32-NEXT:  .LBB61_953: # %cond.load1741
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 437
+; CHECK-RV32-NEXT:    li a4, 436
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 10
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_954
+; CHECK-RV32-NEXT:    j .LBB61_465
+; CHECK-RV32-NEXT:  .LBB61_954: # %cond.load1745
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 438
+; CHECK-RV32-NEXT:    li a4, 437
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 9
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_955
+; CHECK-RV32-NEXT:    j .LBB61_466
+; CHECK-RV32-NEXT:  .LBB61_955: # %cond.load1749
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 439
+; CHECK-RV32-NEXT:    li a4, 438
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 8
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_956
+; CHECK-RV32-NEXT:    j .LBB61_467
+; CHECK-RV32-NEXT:  .LBB61_956: # %cond.load1753
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 440
+; CHECK-RV32-NEXT:    li a4, 439
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 7
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_957
+; CHECK-RV32-NEXT:    j .LBB61_468
+; CHECK-RV32-NEXT:  .LBB61_957: # %cond.load1757
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 441
+; CHECK-RV32-NEXT:    li a4, 440
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 6
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_958
+; CHECK-RV32-NEXT:    j .LBB61_469
+; CHECK-RV32-NEXT:  .LBB61_958: # %cond.load1761
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 442
+; CHECK-RV32-NEXT:    li a4, 441
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 5
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_959
+; CHECK-RV32-NEXT:    j .LBB61_470
+; CHECK-RV32-NEXT:  .LBB61_959: # %cond.load1765
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 443
+; CHECK-RV32-NEXT:    li a4, 442
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 4
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_960
+; CHECK-RV32-NEXT:    j .LBB61_471
+; CHECK-RV32-NEXT:  .LBB61_960: # %cond.load1769
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 444
+; CHECK-RV32-NEXT:    li a4, 443
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 3
+; CHECK-RV32-NEXT:    bltz a3, .LBB61_961
+; CHECK-RV32-NEXT:    j .LBB61_472
+; CHECK-RV32-NEXT:  .LBB61_961: # %cond.load1773
+; CHECK-RV32-NEXT:    lbu a3, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a3
+; CHECK-RV32-NEXT:    li a3, 445
+; CHECK-RV32-NEXT:    li a4, 444
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a3, a2, 2
+; CHECK-RV32-NEXT:    bgez a3, .LBB61_1038
+; CHECK-RV32-NEXT:    j .LBB61_473
+; CHECK-RV32-NEXT:  .LBB61_1038: # %cond.load1773
+; CHECK-RV32-NEXT:    j .LBB61_474
+; CHECK-RV32-NEXT:  .LBB61_962: # %cond.load1785
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 448
+; CHECK-RV32-NEXT:    li a4, 447
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_963
+; CHECK-RV32-NEXT:    j .LBB61_478
+; CHECK-RV32-NEXT:  .LBB61_963: # %cond.load1789
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 449
+; CHECK-RV32-NEXT:    li a4, 448
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_964
+; CHECK-RV32-NEXT:    j .LBB61_479
+; CHECK-RV32-NEXT:  .LBB61_964: # %cond.load1793
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 450
+; CHECK-RV32-NEXT:    li a4, 449
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_965
+; CHECK-RV32-NEXT:    j .LBB61_480
+; CHECK-RV32-NEXT:  .LBB61_965: # %cond.load1797
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 451
+; CHECK-RV32-NEXT:    li a4, 450
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_966
+; CHECK-RV32-NEXT:    j .LBB61_481
+; CHECK-RV32-NEXT:  .LBB61_966: # %cond.load1801
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 452
+; CHECK-RV32-NEXT:    li a4, 451
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_967
+; CHECK-RV32-NEXT:    j .LBB61_482
+; CHECK-RV32-NEXT:  .LBB61_967: # %cond.load1805
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 453
+; CHECK-RV32-NEXT:    li a4, 452
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_968
+; CHECK-RV32-NEXT:    j .LBB61_483
+; CHECK-RV32-NEXT:  .LBB61_968: # %cond.load1809
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 454
+; CHECK-RV32-NEXT:    li a4, 453
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_969
+; CHECK-RV32-NEXT:    j .LBB61_484
+; CHECK-RV32-NEXT:  .LBB61_969: # %cond.load1813
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 455
+; CHECK-RV32-NEXT:    li a4, 454
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_970
+; CHECK-RV32-NEXT:    j .LBB61_485
+; CHECK-RV32-NEXT:  .LBB61_970: # %cond.load1817
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 456
+; CHECK-RV32-NEXT:    li a4, 455
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_971
+; CHECK-RV32-NEXT:    j .LBB61_486
+; CHECK-RV32-NEXT:  .LBB61_971: # %cond.load1821
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 457
+; CHECK-RV32-NEXT:    li a4, 456
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_972
+; CHECK-RV32-NEXT:    j .LBB61_487
+; CHECK-RV32-NEXT:  .LBB61_972: # %cond.load1825
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 458
+; CHECK-RV32-NEXT:    li a4, 457
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a3, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_973
+; CHECK-RV32-NEXT:    j .LBB61_488
+; CHECK-RV32-NEXT:  .LBB61_973: # %cond.load1829
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 459
+; CHECK-RV32-NEXT:    li a4, 458
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_974
+; CHECK-RV32-NEXT:    j .LBB61_489
+; CHECK-RV32-NEXT:  .LBB61_974: # %cond.load1833
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 460
+; CHECK-RV32-NEXT:    li a4, 459
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_975
+; CHECK-RV32-NEXT:    j .LBB61_490
+; CHECK-RV32-NEXT:  .LBB61_975: # %cond.load1837
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 461
+; CHECK-RV32-NEXT:    li a4, 460
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_976
+; CHECK-RV32-NEXT:    j .LBB61_491
+; CHECK-RV32-NEXT:  .LBB61_976: # %cond.load1841
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 462
+; CHECK-RV32-NEXT:    li a4, 461
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_977
+; CHECK-RV32-NEXT:    j .LBB61_492
+; CHECK-RV32-NEXT:  .LBB61_977: # %cond.load1845
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 463
+; CHECK-RV32-NEXT:    li a4, 462
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_978
+; CHECK-RV32-NEXT:    j .LBB61_493
+; CHECK-RV32-NEXT:  .LBB61_978: # %cond.load1849
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 464
+; CHECK-RV32-NEXT:    li a4, 463
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_979
+; CHECK-RV32-NEXT:    j .LBB61_494
+; CHECK-RV32-NEXT:  .LBB61_979: # %cond.load1853
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 465
+; CHECK-RV32-NEXT:    li a4, 464
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_980
+; CHECK-RV32-NEXT:    j .LBB61_495
+; CHECK-RV32-NEXT:  .LBB61_980: # %cond.load1857
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 466
+; CHECK-RV32-NEXT:    li a4, 465
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_981
+; CHECK-RV32-NEXT:    j .LBB61_496
+; CHECK-RV32-NEXT:  .LBB61_981: # %cond.load1861
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 467
+; CHECK-RV32-NEXT:    li a4, 466
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_982
+; CHECK-RV32-NEXT:    j .LBB61_497
+; CHECK-RV32-NEXT:  .LBB61_982: # %cond.load1865
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 468
+; CHECK-RV32-NEXT:    li a4, 467
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_983
+; CHECK-RV32-NEXT:    j .LBB61_498
+; CHECK-RV32-NEXT:  .LBB61_983: # %cond.load1869
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 469
+; CHECK-RV32-NEXT:    li a4, 468
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_984
+; CHECK-RV32-NEXT:    j .LBB61_499
+; CHECK-RV32-NEXT:  .LBB61_984: # %cond.load1873
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 470
+; CHECK-RV32-NEXT:    li a4, 469
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_985
+; CHECK-RV32-NEXT:    j .LBB61_500
+; CHECK-RV32-NEXT:  .LBB61_985: # %cond.load1877
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 471
+; CHECK-RV32-NEXT:    li a4, 470
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_986
+; CHECK-RV32-NEXT:    j .LBB61_501
+; CHECK-RV32-NEXT:  .LBB61_986: # %cond.load1881
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 472
+; CHECK-RV32-NEXT:    li a4, 471
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_987
+; CHECK-RV32-NEXT:    j .LBB61_502
+; CHECK-RV32-NEXT:  .LBB61_987: # %cond.load1885
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 473
+; CHECK-RV32-NEXT:    li a4, 472
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_988
+; CHECK-RV32-NEXT:    j .LBB61_503
+; CHECK-RV32-NEXT:  .LBB61_988: # %cond.load1889
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 474
+; CHECK-RV32-NEXT:    li a4, 473
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_989
+; CHECK-RV32-NEXT:    j .LBB61_504
+; CHECK-RV32-NEXT:  .LBB61_989: # %cond.load1893
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 475
+; CHECK-RV32-NEXT:    li a4, 474
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_990
+; CHECK-RV32-NEXT:    j .LBB61_505
+; CHECK-RV32-NEXT:  .LBB61_990: # %cond.load1897
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 476
+; CHECK-RV32-NEXT:    li a4, 475
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_991
+; CHECK-RV32-NEXT:    j .LBB61_506
+; CHECK-RV32-NEXT:  .LBB61_991: # %cond.load1901
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a4, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a4, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v24, a2
+; CHECK-RV32-NEXT:    li a2, 477
+; CHECK-RV32-NEXT:    li a4, 476
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v24, a4
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a3, 2
+; CHECK-RV32-NEXT:    bgez a2, .LBB61_1039
+; CHECK-RV32-NEXT:    j .LBB61_507
+; CHECK-RV32-NEXT:  .LBB61_1039: # %cond.load1901
+; CHECK-RV32-NEXT:    j .LBB61_508
+; CHECK-RV32-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 480
+; CHECK-RV32-NEXT:    li a3, 479
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 1
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_993
+; CHECK-RV32-NEXT:    j .LBB61_512
+; CHECK-RV32-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 481
+; CHECK-RV32-NEXT:    li a3, 480
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 2
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_994
+; CHECK-RV32-NEXT:    j .LBB61_513
+; CHECK-RV32-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 482
+; CHECK-RV32-NEXT:    li a3, 481
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 4
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_995
+; CHECK-RV32-NEXT:    j .LBB61_514
+; CHECK-RV32-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 483
+; CHECK-RV32-NEXT:    li a3, 482
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 8
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_996
+; CHECK-RV32-NEXT:    j .LBB61_515
+; CHECK-RV32-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 484
+; CHECK-RV32-NEXT:    li a3, 483
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 16
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_997
+; CHECK-RV32-NEXT:    j .LBB61_516
+; CHECK-RV32-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 485
+; CHECK-RV32-NEXT:    li a3, 484
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 32
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_998
+; CHECK-RV32-NEXT:    j .LBB61_517
+; CHECK-RV32-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 486
+; CHECK-RV32-NEXT:    li a3, 485
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 64
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_999
+; CHECK-RV32-NEXT:    j .LBB61_518
+; CHECK-RV32-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 487
+; CHECK-RV32-NEXT:    li a3, 486
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 128
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1000
+; CHECK-RV32-NEXT:    j .LBB61_519
+; CHECK-RV32-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 488
+; CHECK-RV32-NEXT:    li a3, 487
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 256
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1001
+; CHECK-RV32-NEXT:    j .LBB61_520
+; CHECK-RV32-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 489
+; CHECK-RV32-NEXT:    li a3, 488
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 512
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1002
+; CHECK-RV32-NEXT:    j .LBB61_521
+; CHECK-RV32-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 490
+; CHECK-RV32-NEXT:    li a3, 489
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    andi a2, a1, 1024
+; CHECK-RV32-NEXT:    bnez a2, .LBB61_1003
+; CHECK-RV32-NEXT:    j .LBB61_522
+; CHECK-RV32-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 491
+; CHECK-RV32-NEXT:    li a3, 490
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 20
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1004
+; CHECK-RV32-NEXT:    j .LBB61_523
+; CHECK-RV32-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 492
+; CHECK-RV32-NEXT:    li a3, 491
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 19
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1005
+; CHECK-RV32-NEXT:    j .LBB61_524
+; CHECK-RV32-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 493
+; CHECK-RV32-NEXT:    li a3, 492
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 18
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1006
+; CHECK-RV32-NEXT:    j .LBB61_525
+; CHECK-RV32-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 494
+; CHECK-RV32-NEXT:    li a3, 493
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 17
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1007
+; CHECK-RV32-NEXT:    j .LBB61_526
+; CHECK-RV32-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 495
+; CHECK-RV32-NEXT:    li a3, 494
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 16
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1008
+; CHECK-RV32-NEXT:    j .LBB61_527
+; CHECK-RV32-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 496
+; CHECK-RV32-NEXT:    li a3, 495
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 15
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1009
+; CHECK-RV32-NEXT:    j .LBB61_528
+; CHECK-RV32-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 497
+; CHECK-RV32-NEXT:    li a3, 496
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 14
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1010
+; CHECK-RV32-NEXT:    j .LBB61_529
+; CHECK-RV32-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 498
+; CHECK-RV32-NEXT:    li a3, 497
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 13
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1011
+; CHECK-RV32-NEXT:    j .LBB61_530
+; CHECK-RV32-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 499
+; CHECK-RV32-NEXT:    li a3, 498
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 12
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1012
+; CHECK-RV32-NEXT:    j .LBB61_531
+; CHECK-RV32-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 500
+; CHECK-RV32-NEXT:    li a3, 499
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 11
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1013
+; CHECK-RV32-NEXT:    j .LBB61_532
+; CHECK-RV32-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 501
+; CHECK-RV32-NEXT:    li a3, 500
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 10
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1014
+; CHECK-RV32-NEXT:    j .LBB61_533
+; CHECK-RV32-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 502
+; CHECK-RV32-NEXT:    li a3, 501
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 9
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1015
+; CHECK-RV32-NEXT:    j .LBB61_534
+; CHECK-RV32-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 503
+; CHECK-RV32-NEXT:    li a3, 502
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 8
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1016
+; CHECK-RV32-NEXT:    j .LBB61_535
+; CHECK-RV32-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 504
+; CHECK-RV32-NEXT:    li a3, 503
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 7
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1017
+; CHECK-RV32-NEXT:    j .LBB61_536
+; CHECK-RV32-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 505
+; CHECK-RV32-NEXT:    li a3, 504
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 6
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1018
+; CHECK-RV32-NEXT:    j .LBB61_537
+; CHECK-RV32-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 506
+; CHECK-RV32-NEXT:    li a3, 505
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 5
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1019
+; CHECK-RV32-NEXT:    j .LBB61_538
+; CHECK-RV32-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 507
+; CHECK-RV32-NEXT:    li a3, 506
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 4
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1020
+; CHECK-RV32-NEXT:    j .LBB61_539
+; CHECK-RV32-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 508
+; CHECK-RV32-NEXT:    li a3, 507
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 3
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1021
+; CHECK-RV32-NEXT:    j .LBB61_540
+; CHECK-RV32-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 509
+; CHECK-RV32-NEXT:    li a3, 508
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 2
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1022
+; CHECK-RV32-NEXT:    j .LBB61_541
+; CHECK-RV32-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 510
+; CHECK-RV32-NEXT:    li a3, 509
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    slli a2, a1, 1
+; CHECK-RV32-NEXT:    bltz a2, .LBB61_1023
+; CHECK-RV32-NEXT:    j .LBB61_542
+; CHECK-RV32-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-RV32-NEXT:    lbu a2, 0(a0)
+; CHECK-RV32-NEXT:    li a3, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a2
+; CHECK-RV32-NEXT:    li a2, 511
+; CHECK-RV32-NEXT:    li a3, 510
+; CHECK-RV32-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV32-NEXT:    addi a0, a0, 1
+; CHECK-RV32-NEXT:    bltz a1, .LBB61_1024
+; CHECK-RV32-NEXT:    j .LBB61_543
+; CHECK-RV32-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-RV32-NEXT:    lbu a0, 0(a0)
+; CHECK-RV32-NEXT:    li a1, 512
+; CHECK-RV32-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV32-NEXT:    vmv.s.x v16, a0
+; CHECK-RV32-NEXT:    li a0, 511
+; CHECK-RV32-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-RV32-NEXT:    ret
 ;
-; CHECK-INDEXED-RV64-LABEL: test_expandload_v512i8_vlen512:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v0
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_1
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_527
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1: # %else
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_2
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_528
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_2: # %else2
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_3
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_529
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_3: # %else6
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_4
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_530
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_4: # %else10
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_5
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_531
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_5: # %else14
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_6
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_532
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_6: # %else18
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_7
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_533
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_7: # %else22
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_8
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_534
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_8: # %else26
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_9
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_535
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_9: # %else30
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_10
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_536
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_10: # %else34
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_11
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_537
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_11: # %else38
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_12
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_538
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_12: # %else42
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_13
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_539
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_13: # %else46
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_14
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_540
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_14: # %else50
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_15
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_541
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_15: # %else54
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_16
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_542
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_16: # %else58
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_17
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_543
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_17: # %else62
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_18
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_544
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_18: # %else66
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_19
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_545
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_19: # %else70
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_20
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_546
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_20: # %else74
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_21
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_547
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_21: # %else78
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_22
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_548
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_22: # %else82
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_23
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_549
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_23: # %else86
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_24
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_550
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_24: # %else90
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_25
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_551
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_25: # %else94
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_26
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_552
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_26: # %else98
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_27
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_553
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_27: # %else102
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_28
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_554
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_28: # %else106
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_29
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_555
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_29: # %else110
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_30
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_556
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_30: # %else114
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_31
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_557
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_31: # %else118
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_32
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_558
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_32: # %else122
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_33
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_559
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_33: # %else126
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_34
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_560
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_34: # %else130
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_35
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_561
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_35: # %else134
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_36
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_562
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_36: # %else138
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_37
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_563
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_37: # %else142
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_38
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_564
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_38: # %else146
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_39
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_565
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_39: # %else150
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_40
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_566
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_40: # %else154
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_41
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_567
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_41: # %else158
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_42
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_568
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_42: # %else162
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_43
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_569
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_43: # %else166
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_44
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_570
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_44: # %else170
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_45
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_571
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_45: # %else174
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_46
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_572
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_46: # %else178
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_47
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_573
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_47: # %else182
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_48
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_574
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_48: # %else186
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_49
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_575
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_49: # %else190
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_50
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_576
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_50: # %else194
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_51
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_577
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_51: # %else198
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_52
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_578
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_52: # %else202
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_53
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_579
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_53: # %else206
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_54
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_580
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_54: # %else210
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_55
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_581
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_55: # %else214
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_56
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_582
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_56: # %else218
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_57
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_583
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_57: # %else222
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_58
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_584
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_58: # %else226
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_59
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_585
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_59: # %else230
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_60
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_586
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_60: # %else234
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_61
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_587
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_61: # %else238
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_63
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_62: # %cond.load241
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 62
-; CHECK-INDEXED-RV64-NEXT:    li a3, 61
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_63: # %else242
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 1
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_65
-; CHECK-INDEXED-RV64-NEXT:  # %bb.64: # %cond.load245
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v17, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 63
-; CHECK-INDEXED-RV64-NEXT:    li a3, 62
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v17, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_65: # %else246
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_66
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_588
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_66: # %else250
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_67
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_589
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_67: # %else254
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_68
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_590
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_68: # %else258
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_69
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_591
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_69: # %else262
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_70
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_592
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_70: # %else266
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_71
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_593
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_71: # %else270
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_72
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_594
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_72: # %else274
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_73
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_595
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_73: # %else278
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_74
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_596
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_74: # %else282
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_75
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_597
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_75: # %else286
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_76
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_598
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_76: # %else290
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_77
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_599
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_77: # %else294
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_78
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_600
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_78: # %else298
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_79
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_601
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_79: # %else302
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_80
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_602
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_80: # %else306
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_81
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_603
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_81: # %else310
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_82
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_604
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_82: # %else314
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_83
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_605
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_83: # %else318
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_84
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_606
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_84: # %else322
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_85
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_607
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_85: # %else326
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_86
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_608
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_86: # %else330
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_87
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_609
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_87: # %else334
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_88
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_610
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_88: # %else338
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_89
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_611
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_89: # %else342
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_90
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_612
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_90: # %else346
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_91
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_613
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_91: # %else350
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_92
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_614
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_92: # %else354
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_93
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_615
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_93: # %else358
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_94
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_616
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_94: # %else362
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_95
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_617
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_95: # %else366
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_96
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_618
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_96: # %else370
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_97
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_619
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_97: # %else374
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_98
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_620
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_98: # %else378
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_99
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_621
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_99: # %else382
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_100
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_622
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_100: # %else386
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_101
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_623
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_101: # %else390
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_102
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_624
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_102: # %else394
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_103
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_625
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_103: # %else398
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_104
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_626
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_104: # %else402
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_105
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_627
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_105: # %else406
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_106
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_628
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_106: # %else410
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_107
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_629
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_107: # %else414
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_108
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_630
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_108: # %else418
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_109
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_631
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_109: # %else422
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_110
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_632
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_110: # %else426
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_111
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_633
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_111: # %else430
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_112
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_634
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_112: # %else434
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_113
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_635
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_113: # %else438
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_114
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_636
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_114: # %else442
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_115
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_637
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_115: # %else446
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_116
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_638
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_116: # %else450
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_117
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_639
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_117: # %else454
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_118
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_640
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_118: # %else458
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_119
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_641
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_119: # %else462
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_120
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_642
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_120: # %else466
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_121
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_643
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_121: # %else470
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_122
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_644
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_122: # %else474
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_123
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_645
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_123: # %else478
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_124
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_646
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_124: # %else482
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_125
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_647
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_125: # %else486
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_126
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_648
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_126: # %else490
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_127
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_649
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_127: # %else494
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_129
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_128: # %cond.load497
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 126
-; CHECK-INDEXED-RV64-NEXT:    li a3, 125
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_129: # %else498
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_131
-; CHECK-INDEXED-RV64-NEXT:  # %bb.130: # %cond.load501
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v18, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 127
-; CHECK-INDEXED-RV64-NEXT:    li a3, 126
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v18, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_131: # %else502
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_132
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_650
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_132: # %else506
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_133
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_651
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_133: # %else510
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_134
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_652
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_134: # %else514
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_135
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_653
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_135: # %else518
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_136
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_654
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_136: # %else522
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_137
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_655
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_137: # %else526
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_138
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_656
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_138: # %else530
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_139
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_657
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_139: # %else534
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_140
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_658
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_140: # %else538
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_141
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_659
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_141: # %else542
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_142
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_660
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_142: # %else546
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_143
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_661
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_143: # %else550
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_144
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_662
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_144: # %else554
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_145
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_663
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_145: # %else558
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_146
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_664
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_146: # %else562
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_147
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_665
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_147: # %else566
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_148
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_666
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_148: # %else570
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_149
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_667
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_149: # %else574
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_150
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_668
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_150: # %else578
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_151
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_669
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_151: # %else582
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_152
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_670
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_152: # %else586
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_153
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_671
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_153: # %else590
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_154
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_672
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_154: # %else594
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_155
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_673
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_155: # %else598
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_156
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_674
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_156: # %else602
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_157
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_675
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_157: # %else606
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_158
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_676
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_158: # %else610
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_159
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_677
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_159: # %else614
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_160
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_678
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_160: # %else618
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_161
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_679
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_161: # %else622
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_162
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_680
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_162: # %else626
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_163
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_681
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_163: # %else630
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_164
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_682
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_164: # %else634
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_165
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_683
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_165: # %else638
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_166
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_684
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_166: # %else642
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_167
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_685
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_167: # %else646
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_168
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_686
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_168: # %else650
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_169
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_687
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_169: # %else654
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_170
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_688
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_170: # %else658
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_171
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_689
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_171: # %else662
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_172
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_690
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_172: # %else666
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_173
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_691
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_173: # %else670
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_174
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_692
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_174: # %else674
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_175
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_693
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_175: # %else678
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_176
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_694
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_176: # %else682
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_177
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_695
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_177: # %else686
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_178
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_696
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_178: # %else690
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_179
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_697
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_179: # %else694
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_180
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_698
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_180: # %else698
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_181
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_699
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_181: # %else702
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_182
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_700
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_182: # %else706
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_183
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_701
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_183: # %else710
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_184
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_702
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_184: # %else714
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_185
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_703
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_185: # %else718
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_186
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_704
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_186: # %else722
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_187
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_705
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_187: # %else726
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_188
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_706
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_188: # %else730
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_189
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_707
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_189: # %else734
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_190
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_708
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_190: # %else738
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_191
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_709
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_191: # %else742
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_192
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_710
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_192: # %else746
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_193
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_711
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_193: # %else750
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_195
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_194: # %cond.load753
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 190
-; CHECK-INDEXED-RV64-NEXT:    li a3, 189
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_195: # %else754
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_197
-; CHECK-INDEXED-RV64-NEXT:  # %bb.196: # %cond.load757
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v20, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 191
-; CHECK-INDEXED-RV64-NEXT:    li a3, 190
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_197: # %else758
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_198
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_712
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_198: # %else762
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_199
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_713
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_199: # %else766
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_200
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_714
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_200: # %else770
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_201
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_715
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_201: # %else774
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_202
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_716
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_202: # %else778
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_203
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_717
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_203: # %else782
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_204
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_718
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_204: # %else786
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_205
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_719
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_205: # %else790
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_206
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_720
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_206: # %else794
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_207
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_721
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_207: # %else798
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_208
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_722
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_208: # %else802
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_209
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_723
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_209: # %else806
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_210
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_724
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_210: # %else810
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_211
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_725
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_211: # %else814
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_212
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_726
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_212: # %else818
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_213
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_727
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_213: # %else822
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_214
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_728
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_214: # %else826
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_215
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_729
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_215: # %else830
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_216
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_730
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_216: # %else834
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_217
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_731
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_217: # %else838
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_218
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_732
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_218: # %else842
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_219
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_733
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_219: # %else846
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_220
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_734
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_220: # %else850
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_221
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_735
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_221: # %else854
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_222
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_736
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_222: # %else858
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_223
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_737
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_223: # %else862
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_224
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_738
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_224: # %else866
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_225
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_739
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_225: # %else870
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_226
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_740
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_226: # %else874
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_227
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_741
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_227: # %else878
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_228
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_742
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_228: # %else882
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_229
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_743
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_229: # %else886
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_230
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_744
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_230: # %else890
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_231
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_745
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_231: # %else894
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_232
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_746
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_232: # %else898
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_233
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_747
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_233: # %else902
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_234
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_748
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_234: # %else906
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_235
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_749
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_235: # %else910
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_236
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_750
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_236: # %else914
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_237
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_751
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_237: # %else918
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_238
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_752
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_238: # %else922
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_239
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_753
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_239: # %else926
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_240
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_754
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_240: # %else930
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_241
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_755
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_241: # %else934
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_242
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_756
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_242: # %else938
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_243
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_757
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_243: # %else942
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_244
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_758
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_244: # %else946
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_245
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_759
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_245: # %else950
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_246
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_760
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_246: # %else954
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_247
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_761
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_247: # %else958
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_248
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_762
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_248: # %else962
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_249
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_763
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_249: # %else966
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_250
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_764
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_250: # %else970
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_251
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_765
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_251: # %else974
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_252
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_766
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_252: # %else978
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_253
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_767
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_253: # %else982
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_254
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_768
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_254: # %else986
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_255
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_769
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_255: # %else990
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_256
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_770
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_256: # %else994
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_257
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_771
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_257: # %else998
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_258
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_772
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_258: # %else1002
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_259
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_773
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_259: # %else1006
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_261
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_260: # %cond.load1009
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 254
-; CHECK-INDEXED-RV64-NEXT:    li a3, 253
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_261: # %else1010
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_263
-; CHECK-INDEXED-RV64-NEXT:  # %bb.262: # %cond.load1013
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v20, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 255
-; CHECK-INDEXED-RV64-NEXT:    li a3, 254
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v20, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_263: # %else1014
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_264
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_774
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_264: # %else1018
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_265
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_775
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_265: # %else1022
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_266
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_776
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_266: # %else1026
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_267
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_777
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_267: # %else1030
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_268
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_778
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_268: # %else1034
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_269
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_779
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_269: # %else1038
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_270
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_780
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_270: # %else1042
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_271
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_781
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_271: # %else1046
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_272
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_782
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_272: # %else1050
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_273
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_783
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_273: # %else1054
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_274
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_784
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_274: # %else1058
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_275
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_785
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_275: # %else1062
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_276
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_786
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_276: # %else1066
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_277
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_787
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_277: # %else1070
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_278
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_788
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_278: # %else1074
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_279
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_789
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_279: # %else1078
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_280
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_790
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_280: # %else1082
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_281
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_791
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_281: # %else1086
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_282
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_792
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_282: # %else1090
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_283
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_793
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_283: # %else1094
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_284
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_794
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_284: # %else1098
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_285
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_795
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_285: # %else1102
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_286
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_796
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_286: # %else1106
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_287
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_797
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_287: # %else1110
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_288
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_798
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_288: # %else1114
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_289
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_799
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_289: # %else1118
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_290
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_800
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_290: # %else1122
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_291
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_801
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_291: # %else1126
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_292
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_802
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_292: # %else1130
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_293
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_803
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_293: # %else1134
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_294
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_804
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_294: # %else1138
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_295
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_805
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_295: # %else1142
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_296
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_806
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_296: # %else1146
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_297
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_807
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_297: # %else1150
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_298
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_808
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_298: # %else1154
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_299
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_809
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_299: # %else1158
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_300
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_810
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_300: # %else1162
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_301
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_811
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_301: # %else1166
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_302
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_812
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_302: # %else1170
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_303
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_813
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_303: # %else1174
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_304
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_814
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_304: # %else1178
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_305
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_815
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_305: # %else1182
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_306
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_816
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_306: # %else1186
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_307
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_817
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_307: # %else1190
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_308
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_818
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_308: # %else1194
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_309
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_819
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_309: # %else1198
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_310
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_820
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_310: # %else1202
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_311
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_821
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_311: # %else1206
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_312
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_822
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_312: # %else1210
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_313
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_823
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_313: # %else1214
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_314
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_824
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_314: # %else1218
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_315
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_825
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_315: # %else1222
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_316
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_826
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_316: # %else1226
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_317
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_827
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_317: # %else1230
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_318
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_828
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_318: # %else1234
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_319
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_829
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_319: # %else1238
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_320
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_830
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_320: # %else1242
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_321
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_831
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_321: # %else1246
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_322
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_832
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_322: # %else1250
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_323
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_833
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_323: # %else1254
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_324
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_834
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_324: # %else1258
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_325
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_835
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_325: # %else1262
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_327
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_326: # %cond.load1265
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 318
-; CHECK-INDEXED-RV64-NEXT:    li a3, 317
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_327: # %else1266
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_329
-; CHECK-INDEXED-RV64-NEXT:  # %bb.328: # %cond.load1269
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 319
-; CHECK-INDEXED-RV64-NEXT:    li a3, 318
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_329: # %else1270
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_330
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_836
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_330: # %else1274
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_331
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_837
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_331: # %else1278
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_332
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_838
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_332: # %else1282
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_333
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_839
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_333: # %else1286
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_334
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_840
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_334: # %else1290
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_335
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_841
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_335: # %else1294
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_336
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_842
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_336: # %else1298
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_337
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_843
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_337: # %else1302
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_338
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_844
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_338: # %else1306
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_339
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_845
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_339: # %else1310
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_340
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_846
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_340: # %else1314
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_341
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_847
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_341: # %else1318
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_342
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_848
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_342: # %else1322
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_343
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_849
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_343: # %else1326
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_344
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_850
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_344: # %else1330
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_345
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_851
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_345: # %else1334
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_346
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_852
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_346: # %else1338
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_347
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_853
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_347: # %else1342
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_348
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_854
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_348: # %else1346
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_349
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_855
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_349: # %else1350
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_350
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_856
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_350: # %else1354
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_351
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_857
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_351: # %else1358
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_352
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_858
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_352: # %else1362
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_353
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_859
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_353: # %else1366
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_354
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_860
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_354: # %else1370
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_355
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_861
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_355: # %else1374
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_356
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_862
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_356: # %else1378
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_357
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_863
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_357: # %else1382
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_358
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_864
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_358: # %else1386
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_359
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_865
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_359: # %else1390
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_360
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_866
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_360: # %else1394
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_361
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_867
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_361: # %else1398
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_362
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_868
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_362: # %else1402
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_363
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_869
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_363: # %else1406
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_364
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_870
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_364: # %else1410
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_365
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_871
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_365: # %else1414
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_366
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_872
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_366: # %else1418
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_367
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_873
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_367: # %else1422
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_368
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_874
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_368: # %else1426
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_369
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_875
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_369: # %else1430
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_370
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_876
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_370: # %else1434
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_371
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_877
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_371: # %else1438
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_372
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_878
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_372: # %else1442
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_373
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_879
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_373: # %else1446
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_374
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_880
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_374: # %else1450
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_375
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_881
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_375: # %else1454
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_376
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_882
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_376: # %else1458
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_377
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_883
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_377: # %else1462
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_378
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_884
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_378: # %else1466
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_379
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_885
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_379: # %else1470
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_380
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_886
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_380: # %else1474
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_381
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_887
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_381: # %else1478
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_382
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_888
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_382: # %else1482
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_383
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_889
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_383: # %else1486
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_384
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_890
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_384: # %else1490
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_385
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_891
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_385: # %else1494
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_386
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_892
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_386: # %else1498
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_387
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_893
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_387: # %else1502
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_388
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_894
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_388: # %else1506
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_389
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_895
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_389: # %else1510
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_390
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_896
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_390: # %else1514
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_391
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_897
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_391: # %else1518
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_393
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_392: # %cond.load1521
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 382
-; CHECK-INDEXED-RV64-NEXT:    li a3, 381
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_393: # %else1522
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_395
-; CHECK-INDEXED-RV64-NEXT:  # %bb.394: # %cond.load1525
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 383
-; CHECK-INDEXED-RV64-NEXT:    li a3, 382
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_395: # %else1526
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a2, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_396
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_898
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_396: # %else1530
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_397
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_899
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_397: # %else1534
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_398
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_900
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_398: # %else1538
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_399
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_901
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_399: # %else1542
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_400
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_902
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_400: # %else1546
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_401
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_903
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_401: # %else1550
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_402
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_904
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_402: # %else1554
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_403
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_905
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_403: # %else1558
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_404
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_906
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_404: # %else1562
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_405
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_907
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_405: # %else1566
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_406
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_908
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_406: # %else1570
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a1, .LBB61_407
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_909
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_407: # %else1574
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_408
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_910
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_408: # %else1578
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_409
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_911
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_409: # %else1582
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_410
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_912
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_410: # %else1586
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_411
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_913
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_411: # %else1590
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_412
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_914
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_412: # %else1594
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_413
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_915
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_413: # %else1598
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_414
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_916
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_414: # %else1602
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_415
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_917
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_415: # %else1606
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_416
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_918
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_416: # %else1610
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_417
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_919
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_417: # %else1614
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_418
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_920
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_418: # %else1618
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_419
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_921
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_419: # %else1622
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_420
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_922
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_420: # %else1626
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_421
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_923
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_421: # %else1630
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_422
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_924
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_422: # %else1634
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_423
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_925
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_423: # %else1638
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_424
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_926
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_424: # %else1642
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_425
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_927
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_425: # %else1646
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_426
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_928
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_426: # %else1650
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_427
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_929
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_427: # %else1654
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_428
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_930
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_428: # %else1658
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_429
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_931
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_429: # %else1662
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_430
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_932
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_430: # %else1666
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_431
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_933
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_431: # %else1670
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_432
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_934
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_432: # %else1674
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_433
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_935
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_433: # %else1678
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_434
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_936
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_434: # %else1682
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_435
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_937
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_435: # %else1686
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_436
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_938
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_436: # %else1690
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_437
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_939
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_437: # %else1694
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_438
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_940
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_438: # %else1698
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_439
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_941
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_439: # %else1702
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_440
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_942
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_440: # %else1706
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_441
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_943
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_441: # %else1710
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_442
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_944
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_442: # %else1714
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_443
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_945
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_443: # %else1718
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_444
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_946
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_444: # %else1722
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_445
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_947
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_445: # %else1726
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_446
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_948
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_446: # %else1730
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_447
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_949
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_447: # %else1734
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_448
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_950
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_448: # %else1738
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_449
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_951
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_449: # %else1742
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_450
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_952
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_450: # %else1746
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_451
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_953
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_451: # %else1750
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_452
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_954
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_452: # %else1754
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_453
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_955
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_453: # %else1758
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_454
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_956
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_454: # %else1762
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_455
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_957
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_455: # %else1766
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_456
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_958
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_456: # %else1770
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_457
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_959
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_457: # %else1774
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_459
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_458: # %cond.load1777
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 446
-; CHECK-INDEXED-RV64-NEXT:    li a3, 445
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_459: # %else1778
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vslidedown.vi v16, v0, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_461
-; CHECK-INDEXED-RV64-NEXT:  # %bb.460: # %cond.load1781
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 447
-; CHECK-INDEXED-RV64-NEXT:    li a3, 446
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_461: # %else1782
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.x.s a1, v16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_462
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_960
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_462: # %else1786
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_463
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_961
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_463: # %else1790
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_464
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_962
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_464: # %else1794
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_465
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_963
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_465: # %else1798
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_466
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_964
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_466: # %else1802
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_467
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_965
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_467: # %else1806
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_468
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_966
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_468: # %else1810
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_469
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_967
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_469: # %else1814
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_470
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_968
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_470: # %else1818
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_471
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_969
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_471: # %else1822
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_472
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_970
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_472: # %else1826
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    beqz a2, .LBB61_473
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_971
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_473: # %else1830
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_474
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_972
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_474: # %else1834
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_475
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_973
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_475: # %else1838
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_476
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_974
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_476: # %else1842
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_477
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_975
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_477: # %else1846
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_478
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_976
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_478: # %else1850
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_479
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_977
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_479: # %else1854
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_480
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_978
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_480: # %else1858
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_481
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_979
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_481: # %else1862
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_482
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_980
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_482: # %else1866
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_483
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_981
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_483: # %else1870
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_484
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_982
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_484: # %else1874
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_485
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_983
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_485: # %else1878
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_486
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_984
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_486: # %else1882
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_487
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_985
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_487: # %else1886
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_488
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_986
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_488: # %else1890
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_489
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_987
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_489: # %else1894
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_490
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_988
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_490: # %else1898
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_491
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_989
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_491: # %else1902
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_492
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_990
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_492: # %else1906
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_493
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_991
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_493: # %else1910
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_494
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_992
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_494: # %else1914
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_495
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_993
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_495: # %else1918
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_496
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_994
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_496: # %else1922
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_497
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_995
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_497: # %else1926
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_498
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_996
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_498: # %else1930
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_499
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_997
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_499: # %else1934
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_500
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_998
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_500: # %else1938
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_501
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_999
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_501: # %else1942
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_502
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1000
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_502: # %else1946
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_503
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1001
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_503: # %else1950
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_504
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1002
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_504: # %else1954
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_505
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1003
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_505: # %else1958
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_506
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1004
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_506: # %else1962
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_507
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1005
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_507: # %else1966
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_508
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1006
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_508: # %else1970
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_509
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1007
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_509: # %else1974
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_510
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1008
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_510: # %else1978
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_511
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1009
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_511: # %else1982
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_512
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1010
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_512: # %else1986
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_513
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1011
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_513: # %else1990
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_514
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1012
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_514: # %else1994
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_515
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1013
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_515: # %else1998
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_516
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1014
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_516: # %else2002
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_517
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1015
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_517: # %else2006
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_518
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1016
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_518: # %else2010
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_519
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1017
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_519: # %else2014
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_520
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1018
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_520: # %else2018
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_521
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1019
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_521: # %else2022
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_522
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1020
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_522: # %else2026
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_523
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1021
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_523: # %else2030
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_524
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1022
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_524: # %else2034
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_525
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1023
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_525: # %else2038
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_526
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_1024
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_526: # %else2042
-; CHECK-INDEXED-RV64-NEXT:    ret
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_527: # %cond.load
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v8, a1
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_528
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_2
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_528: # %cond.load1
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 1
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_529
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_3
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_529: # %cond.load5
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 2
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_530
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_4
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_530: # %cond.load9
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_531
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_5
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_531: # %cond.load13
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 4
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_532
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_6
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_532: # %cond.load17
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 5
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_533
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_7
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_533: # %cond.load21
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 6
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_534
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_8
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_534: # %cond.load25
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 7
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_535
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_9
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_535: # %cond.load29
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 8
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_536
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_10
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_536: # %cond.load33
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 9
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_537
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_11
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_537: # %cond.load37
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 10
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_538
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_12
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_538: # %cond.load41
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 11
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_539
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_13
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_539: # %cond.load45
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 12
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_540
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_14
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_540: # %cond.load49
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 13
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_541
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_15
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_541: # %cond.load53
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 14
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_542
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_16
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_542: # %cond.load57
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 15
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_543
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_17
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_543: # %cond.load61
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 16
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_544
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_18
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_544: # %cond.load65
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 17
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_545
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_19
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_545: # %cond.load69
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 18
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_546
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_20
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_546: # %cond.load73
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 19
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_547
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_21
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_547: # %cond.load77
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 20
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_548
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_22
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_548: # %cond.load81
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 21
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_549
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_23
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_549: # %cond.load85
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 22
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_550
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_24
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_550: # %cond.load89
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 23
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_551
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_25
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_551: # %cond.load93
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 24
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_552
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_26
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_552: # %cond.load97
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 25
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_553
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_27
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_553: # %cond.load101
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 26
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_554
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_28
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_554: # %cond.load105
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 27
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_555
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_29
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_555: # %cond.load109
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 28
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_556
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_30
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_556: # %cond.load113
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 29
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_557
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_31
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_557: # %cond.load117
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v16, 30
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_558
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_32
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_558: # %cond.load121
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 32
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vi v8, v24, 31
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_559
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_33
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_559: # %cond.load125
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 33
-; CHECK-INDEXED-RV64-NEXT:    li a3, 32
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_560
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_34
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_560: # %cond.load129
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 34
-; CHECK-INDEXED-RV64-NEXT:    li a3, 33
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_561
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_35
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_561: # %cond.load133
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 35
-; CHECK-INDEXED-RV64-NEXT:    li a3, 34
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_562
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_36
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_562: # %cond.load137
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 36
-; CHECK-INDEXED-RV64-NEXT:    li a3, 35
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_563
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_37
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_563: # %cond.load141
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 37
-; CHECK-INDEXED-RV64-NEXT:    li a3, 36
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_564
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_38
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_564: # %cond.load145
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 38
-; CHECK-INDEXED-RV64-NEXT:    li a3, 37
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_565
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_39
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_565: # %cond.load149
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 39
-; CHECK-INDEXED-RV64-NEXT:    li a3, 38
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_566
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_40
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_566: # %cond.load153
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 40
-; CHECK-INDEXED-RV64-NEXT:    li a3, 39
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_567
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_41
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_567: # %cond.load157
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 41
-; CHECK-INDEXED-RV64-NEXT:    li a3, 40
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_568
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_42
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_568: # %cond.load161
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 42
-; CHECK-INDEXED-RV64-NEXT:    li a3, 41
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_569
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_43
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_569: # %cond.load165
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 43
-; CHECK-INDEXED-RV64-NEXT:    li a3, 42
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_570
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_44
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_570: # %cond.load169
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 44
-; CHECK-INDEXED-RV64-NEXT:    li a3, 43
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_571
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_45
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_571: # %cond.load173
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 45
-; CHECK-INDEXED-RV64-NEXT:    li a3, 44
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_572
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_46
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_572: # %cond.load177
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 46
-; CHECK-INDEXED-RV64-NEXT:    li a3, 45
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_573
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_47
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_573: # %cond.load181
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 47
-; CHECK-INDEXED-RV64-NEXT:    li a3, 46
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_574
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_48
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_574: # %cond.load185
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 48
-; CHECK-INDEXED-RV64-NEXT:    li a3, 47
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_575
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_49
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_575: # %cond.load189
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 49
-; CHECK-INDEXED-RV64-NEXT:    li a3, 48
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_576
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_50
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_576: # %cond.load193
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 50
-; CHECK-INDEXED-RV64-NEXT:    li a3, 49
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_577
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_51
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_577: # %cond.load197
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 51
-; CHECK-INDEXED-RV64-NEXT:    li a3, 50
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_578
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_52
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_578: # %cond.load201
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 52
-; CHECK-INDEXED-RV64-NEXT:    li a3, 51
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_579
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_53
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_579: # %cond.load205
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 53
-; CHECK-INDEXED-RV64-NEXT:    li a3, 52
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_580
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_54
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_580: # %cond.load209
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 54
-; CHECK-INDEXED-RV64-NEXT:    li a3, 53
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_581
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_55
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_581: # %cond.load213
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 55
-; CHECK-INDEXED-RV64-NEXT:    li a3, 54
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_582
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_56
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_582: # %cond.load217
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 56
-; CHECK-INDEXED-RV64-NEXT:    li a3, 55
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_583
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_57
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_583: # %cond.load221
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 57
-; CHECK-INDEXED-RV64-NEXT:    li a3, 56
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_584
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_58
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_584: # %cond.load225
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 58
-; CHECK-INDEXED-RV64-NEXT:    li a3, 57
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_585
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_59
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_585: # %cond.load229
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 59
-; CHECK-INDEXED-RV64-NEXT:    li a3, 58
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_586
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_60
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_586: # %cond.load233
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 60
-; CHECK-INDEXED-RV64-NEXT:    li a3, 59
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_587
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_61
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_587: # %cond.load237
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 61
-; CHECK-INDEXED-RV64-NEXT:    li a3, 60
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1025
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_62
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1025: # %cond.load237
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_63
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_588: # %cond.load249
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 64
-; CHECK-INDEXED-RV64-NEXT:    li a3, 63
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv1r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_589
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_67
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_589: # %cond.load253
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 65
-; CHECK-INDEXED-RV64-NEXT:    li a3, 64
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_590
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_68
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_590: # %cond.load257
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 66
-; CHECK-INDEXED-RV64-NEXT:    li a3, 65
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_591
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_69
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_591: # %cond.load261
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 67
-; CHECK-INDEXED-RV64-NEXT:    li a3, 66
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_592
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_70
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_592: # %cond.load265
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 68
-; CHECK-INDEXED-RV64-NEXT:    li a3, 67
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_593
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_71
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_593: # %cond.load269
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 69
-; CHECK-INDEXED-RV64-NEXT:    li a3, 68
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_594
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_72
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_594: # %cond.load273
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 70
-; CHECK-INDEXED-RV64-NEXT:    li a3, 69
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_595
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_73
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_595: # %cond.load277
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 71
-; CHECK-INDEXED-RV64-NEXT:    li a3, 70
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_596
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_74
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_596: # %cond.load281
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 72
-; CHECK-INDEXED-RV64-NEXT:    li a3, 71
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_597
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_75
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_597: # %cond.load285
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 73
-; CHECK-INDEXED-RV64-NEXT:    li a3, 72
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_598
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_76
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_598: # %cond.load289
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 74
-; CHECK-INDEXED-RV64-NEXT:    li a3, 73
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_599
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_77
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_599: # %cond.load293
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 75
-; CHECK-INDEXED-RV64-NEXT:    li a3, 74
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_600
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_78
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_600: # %cond.load297
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 76
-; CHECK-INDEXED-RV64-NEXT:    li a3, 75
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_601
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_79
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_601: # %cond.load301
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 77
-; CHECK-INDEXED-RV64-NEXT:    li a3, 76
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_602
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_80
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_602: # %cond.load305
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 78
-; CHECK-INDEXED-RV64-NEXT:    li a3, 77
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_603
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_81
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_603: # %cond.load309
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 79
-; CHECK-INDEXED-RV64-NEXT:    li a3, 78
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_604
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_82
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_604: # %cond.load313
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 80
-; CHECK-INDEXED-RV64-NEXT:    li a3, 79
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_605
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_83
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_605: # %cond.load317
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 81
-; CHECK-INDEXED-RV64-NEXT:    li a3, 80
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_606
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_84
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_606: # %cond.load321
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 82
-; CHECK-INDEXED-RV64-NEXT:    li a3, 81
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_607
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_85
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_607: # %cond.load325
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 83
-; CHECK-INDEXED-RV64-NEXT:    li a3, 82
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_608
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_86
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_608: # %cond.load329
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 84
-; CHECK-INDEXED-RV64-NEXT:    li a3, 83
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_609
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_87
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_609: # %cond.load333
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 85
-; CHECK-INDEXED-RV64-NEXT:    li a3, 84
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_610
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_88
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_610: # %cond.load337
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 86
-; CHECK-INDEXED-RV64-NEXT:    li a3, 85
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_611
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_89
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_611: # %cond.load341
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 87
-; CHECK-INDEXED-RV64-NEXT:    li a3, 86
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_612
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_90
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_612: # %cond.load345
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 88
-; CHECK-INDEXED-RV64-NEXT:    li a3, 87
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_613
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_91
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_613: # %cond.load349
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 89
-; CHECK-INDEXED-RV64-NEXT:    li a3, 88
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_614
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_92
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_614: # %cond.load353
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 90
-; CHECK-INDEXED-RV64-NEXT:    li a3, 89
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_615
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_93
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_615: # %cond.load357
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 91
-; CHECK-INDEXED-RV64-NEXT:    li a3, 90
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_616
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_94
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_616: # %cond.load361
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 92
-; CHECK-INDEXED-RV64-NEXT:    li a3, 91
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_617
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_95
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_617: # %cond.load365
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 93
-; CHECK-INDEXED-RV64-NEXT:    li a3, 92
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_618
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_96
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_618: # %cond.load369
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 94
-; CHECK-INDEXED-RV64-NEXT:    li a3, 93
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_619
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_97
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_619: # %cond.load373
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 95
-; CHECK-INDEXED-RV64-NEXT:    li a3, 94
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_620
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_98
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_620: # %cond.load377
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 96
-; CHECK-INDEXED-RV64-NEXT:    li a3, 95
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_621
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_99
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_621: # %cond.load381
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 97
-; CHECK-INDEXED-RV64-NEXT:    li a3, 96
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_622
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_100
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_622: # %cond.load385
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 98
-; CHECK-INDEXED-RV64-NEXT:    li a3, 97
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_623
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_101
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_623: # %cond.load389
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 99
-; CHECK-INDEXED-RV64-NEXT:    li a3, 98
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_624
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_102
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_624: # %cond.load393
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 100
-; CHECK-INDEXED-RV64-NEXT:    li a3, 99
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_625
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_103
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_625: # %cond.load397
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 101
-; CHECK-INDEXED-RV64-NEXT:    li a3, 100
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_626
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_104
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_626: # %cond.load401
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 102
-; CHECK-INDEXED-RV64-NEXT:    li a3, 101
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_627
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_105
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_627: # %cond.load405
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 103
-; CHECK-INDEXED-RV64-NEXT:    li a3, 102
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_628
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_106
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_628: # %cond.load409
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 104
-; CHECK-INDEXED-RV64-NEXT:    li a3, 103
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_629
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_107
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_629: # %cond.load413
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 105
-; CHECK-INDEXED-RV64-NEXT:    li a3, 104
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_630
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_108
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_630: # %cond.load417
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 106
-; CHECK-INDEXED-RV64-NEXT:    li a3, 105
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_631
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_109
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_631: # %cond.load421
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 107
-; CHECK-INDEXED-RV64-NEXT:    li a3, 106
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_632
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_110
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_632: # %cond.load425
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 108
-; CHECK-INDEXED-RV64-NEXT:    li a3, 107
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_633
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_111
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_633: # %cond.load429
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 109
-; CHECK-INDEXED-RV64-NEXT:    li a3, 108
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_634
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_112
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_634: # %cond.load433
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 110
-; CHECK-INDEXED-RV64-NEXT:    li a3, 109
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_635
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_113
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_635: # %cond.load437
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 111
-; CHECK-INDEXED-RV64-NEXT:    li a3, 110
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_636
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_114
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_636: # %cond.load441
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 112
-; CHECK-INDEXED-RV64-NEXT:    li a3, 111
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_637
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_115
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_637: # %cond.load445
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 113
-; CHECK-INDEXED-RV64-NEXT:    li a3, 112
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_638
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_116
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_638: # %cond.load449
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 114
-; CHECK-INDEXED-RV64-NEXT:    li a3, 113
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_639
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_117
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_639: # %cond.load453
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 115
-; CHECK-INDEXED-RV64-NEXT:    li a3, 114
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_640
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_118
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_640: # %cond.load457
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 116
-; CHECK-INDEXED-RV64-NEXT:    li a3, 115
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_641
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_119
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_641: # %cond.load461
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 117
-; CHECK-INDEXED-RV64-NEXT:    li a3, 116
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_642
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_120
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_642: # %cond.load465
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 118
-; CHECK-INDEXED-RV64-NEXT:    li a3, 117
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_643
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_121
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_643: # %cond.load469
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 119
-; CHECK-INDEXED-RV64-NEXT:    li a3, 118
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_644
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_122
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_644: # %cond.load473
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 120
-; CHECK-INDEXED-RV64-NEXT:    li a3, 119
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_645
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_123
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_645: # %cond.load477
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 121
-; CHECK-INDEXED-RV64-NEXT:    li a3, 120
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_646
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_124
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_646: # %cond.load481
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 122
-; CHECK-INDEXED-RV64-NEXT:    li a3, 121
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_647
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_125
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_647: # %cond.load485
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 123
-; CHECK-INDEXED-RV64-NEXT:    li a3, 122
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_648
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_126
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_648: # %cond.load489
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 124
-; CHECK-INDEXED-RV64-NEXT:    li a3, 123
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_649
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_127
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_649: # %cond.load493
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 125
-; CHECK-INDEXED-RV64-NEXT:    li a3, 124
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1026
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_128
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1026: # %cond.load493
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_129
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_650: # %cond.load505
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v24, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 128
-; CHECK-INDEXED-RV64-NEXT:    li a3, 127
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v24, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv2r.v v16, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v16
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_651
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_133
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_651: # %cond.load509
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 129
-; CHECK-INDEXED-RV64-NEXT:    li a3, 128
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_652
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_134
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_652: # %cond.load513
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 130
-; CHECK-INDEXED-RV64-NEXT:    li a3, 129
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_653
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_135
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_653: # %cond.load517
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 131
-; CHECK-INDEXED-RV64-NEXT:    li a3, 130
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_654
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_136
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_654: # %cond.load521
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 132
-; CHECK-INDEXED-RV64-NEXT:    li a3, 131
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_655
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_137
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_655: # %cond.load525
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 133
-; CHECK-INDEXED-RV64-NEXT:    li a3, 132
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_656
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_138
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_656: # %cond.load529
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 134
-; CHECK-INDEXED-RV64-NEXT:    li a3, 133
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_657
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_139
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_657: # %cond.load533
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 135
-; CHECK-INDEXED-RV64-NEXT:    li a3, 134
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_658
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_140
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_658: # %cond.load537
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 136
-; CHECK-INDEXED-RV64-NEXT:    li a3, 135
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_659
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_141
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_659: # %cond.load541
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 137
-; CHECK-INDEXED-RV64-NEXT:    li a3, 136
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_660
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_142
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_660: # %cond.load545
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 138
-; CHECK-INDEXED-RV64-NEXT:    li a3, 137
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_661
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_143
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_661: # %cond.load549
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 139
-; CHECK-INDEXED-RV64-NEXT:    li a3, 138
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_662
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_144
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_662: # %cond.load553
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 140
-; CHECK-INDEXED-RV64-NEXT:    li a3, 139
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_663
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_145
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_663: # %cond.load557
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 141
-; CHECK-INDEXED-RV64-NEXT:    li a3, 140
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_664
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_146
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_664: # %cond.load561
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 142
-; CHECK-INDEXED-RV64-NEXT:    li a3, 141
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_665
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_147
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_665: # %cond.load565
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 143
-; CHECK-INDEXED-RV64-NEXT:    li a3, 142
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_666
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_148
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_666: # %cond.load569
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 144
-; CHECK-INDEXED-RV64-NEXT:    li a3, 143
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_667
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_149
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_667: # %cond.load573
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 145
-; CHECK-INDEXED-RV64-NEXT:    li a3, 144
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_668
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_150
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_668: # %cond.load577
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 146
-; CHECK-INDEXED-RV64-NEXT:    li a3, 145
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_669
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_151
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_669: # %cond.load581
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 147
-; CHECK-INDEXED-RV64-NEXT:    li a3, 146
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_670
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_152
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_670: # %cond.load585
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 148
-; CHECK-INDEXED-RV64-NEXT:    li a3, 147
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_671
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_153
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_671: # %cond.load589
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 149
-; CHECK-INDEXED-RV64-NEXT:    li a3, 148
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_672
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_154
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_672: # %cond.load593
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 150
-; CHECK-INDEXED-RV64-NEXT:    li a3, 149
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_673
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_155
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_673: # %cond.load597
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 151
-; CHECK-INDEXED-RV64-NEXT:    li a3, 150
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_674
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_156
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_674: # %cond.load601
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 152
-; CHECK-INDEXED-RV64-NEXT:    li a3, 151
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_675
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_157
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_675: # %cond.load605
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 153
-; CHECK-INDEXED-RV64-NEXT:    li a3, 152
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_676
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_158
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_676: # %cond.load609
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 154
-; CHECK-INDEXED-RV64-NEXT:    li a3, 153
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_677
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_159
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_677: # %cond.load613
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 155
-; CHECK-INDEXED-RV64-NEXT:    li a3, 154
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_678
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_160
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_678: # %cond.load617
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 156
-; CHECK-INDEXED-RV64-NEXT:    li a3, 155
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_679
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_161
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_679: # %cond.load621
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 157
-; CHECK-INDEXED-RV64-NEXT:    li a3, 156
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_680
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_162
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_680: # %cond.load625
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 158
-; CHECK-INDEXED-RV64-NEXT:    li a3, 157
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_681
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_163
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_681: # %cond.load629
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 159
-; CHECK-INDEXED-RV64-NEXT:    li a3, 158
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_682
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_164
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_682: # %cond.load633
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 160
-; CHECK-INDEXED-RV64-NEXT:    li a3, 159
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_683
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_165
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_683: # %cond.load637
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 161
-; CHECK-INDEXED-RV64-NEXT:    li a3, 160
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_684
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_166
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_684: # %cond.load641
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 162
-; CHECK-INDEXED-RV64-NEXT:    li a3, 161
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_685
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_167
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_685: # %cond.load645
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 163
-; CHECK-INDEXED-RV64-NEXT:    li a3, 162
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_686
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_168
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_686: # %cond.load649
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 164
-; CHECK-INDEXED-RV64-NEXT:    li a3, 163
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_687
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_169
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_687: # %cond.load653
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 165
-; CHECK-INDEXED-RV64-NEXT:    li a3, 164
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_688
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_170
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_688: # %cond.load657
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 166
-; CHECK-INDEXED-RV64-NEXT:    li a3, 165
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_689
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_171
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_689: # %cond.load661
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 167
-; CHECK-INDEXED-RV64-NEXT:    li a3, 166
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_690
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_172
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_690: # %cond.load665
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 168
-; CHECK-INDEXED-RV64-NEXT:    li a3, 167
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_691
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_173
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_691: # %cond.load669
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 169
-; CHECK-INDEXED-RV64-NEXT:    li a3, 168
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_692
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_174
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_692: # %cond.load673
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 170
-; CHECK-INDEXED-RV64-NEXT:    li a3, 169
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_693
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_175
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_693: # %cond.load677
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 171
-; CHECK-INDEXED-RV64-NEXT:    li a3, 170
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_694
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_176
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_694: # %cond.load681
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 172
-; CHECK-INDEXED-RV64-NEXT:    li a3, 171
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_695
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_177
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_695: # %cond.load685
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 173
-; CHECK-INDEXED-RV64-NEXT:    li a3, 172
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_696
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_178
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_696: # %cond.load689
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 174
-; CHECK-INDEXED-RV64-NEXT:    li a3, 173
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_697
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_179
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_697: # %cond.load693
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 175
-; CHECK-INDEXED-RV64-NEXT:    li a3, 174
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_698
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_180
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_698: # %cond.load697
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 176
-; CHECK-INDEXED-RV64-NEXT:    li a3, 175
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_699
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_181
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_699: # %cond.load701
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 177
-; CHECK-INDEXED-RV64-NEXT:    li a3, 176
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_700
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_182
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_700: # %cond.load705
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 178
-; CHECK-INDEXED-RV64-NEXT:    li a3, 177
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_701
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_183
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_701: # %cond.load709
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 179
-; CHECK-INDEXED-RV64-NEXT:    li a3, 178
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_702
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_184
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_702: # %cond.load713
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 180
-; CHECK-INDEXED-RV64-NEXT:    li a3, 179
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_703
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_185
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_703: # %cond.load717
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 181
-; CHECK-INDEXED-RV64-NEXT:    li a3, 180
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_704
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_186
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_704: # %cond.load721
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 182
-; CHECK-INDEXED-RV64-NEXT:    li a3, 181
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_705
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_187
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_705: # %cond.load725
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 183
-; CHECK-INDEXED-RV64-NEXT:    li a3, 182
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_706
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_188
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_706: # %cond.load729
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 184
-; CHECK-INDEXED-RV64-NEXT:    li a3, 183
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_707
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_189
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_707: # %cond.load733
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 185
-; CHECK-INDEXED-RV64-NEXT:    li a3, 184
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_708
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_190
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_708: # %cond.load737
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 186
-; CHECK-INDEXED-RV64-NEXT:    li a3, 185
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_709
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_191
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_709: # %cond.load741
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 187
-; CHECK-INDEXED-RV64-NEXT:    li a3, 186
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_710
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_192
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_710: # %cond.load745
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 188
-; CHECK-INDEXED-RV64-NEXT:    li a3, 187
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_711
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_193
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_711: # %cond.load749
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 189
-; CHECK-INDEXED-RV64-NEXT:    li a3, 188
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1027
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_194
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1027: # %cond.load749
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_195
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_712: # %cond.load761
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 192
-; CHECK-INDEXED-RV64-NEXT:    li a3, 191
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_713
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_199
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_713: # %cond.load765
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 193
-; CHECK-INDEXED-RV64-NEXT:    li a3, 192
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_714
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_200
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_714: # %cond.load769
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 194
-; CHECK-INDEXED-RV64-NEXT:    li a3, 193
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_715
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_201
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_715: # %cond.load773
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 195
-; CHECK-INDEXED-RV64-NEXT:    li a3, 194
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_716
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_202
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_716: # %cond.load777
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 196
-; CHECK-INDEXED-RV64-NEXT:    li a3, 195
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_717
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_203
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_717: # %cond.load781
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 197
-; CHECK-INDEXED-RV64-NEXT:    li a3, 196
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_718
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_204
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_718: # %cond.load785
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 198
-; CHECK-INDEXED-RV64-NEXT:    li a3, 197
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_719
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_205
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_719: # %cond.load789
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 199
-; CHECK-INDEXED-RV64-NEXT:    li a3, 198
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_720
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_206
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_720: # %cond.load793
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 200
-; CHECK-INDEXED-RV64-NEXT:    li a3, 199
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_721
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_207
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_721: # %cond.load797
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 201
-; CHECK-INDEXED-RV64-NEXT:    li a3, 200
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_722
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_208
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_722: # %cond.load801
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 202
-; CHECK-INDEXED-RV64-NEXT:    li a3, 201
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_723
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_209
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_723: # %cond.load805
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 203
-; CHECK-INDEXED-RV64-NEXT:    li a3, 202
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_724
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_210
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_724: # %cond.load809
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 204
-; CHECK-INDEXED-RV64-NEXT:    li a3, 203
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_725
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_211
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_725: # %cond.load813
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 205
-; CHECK-INDEXED-RV64-NEXT:    li a3, 204
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_726
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_212
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_726: # %cond.load817
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 206
-; CHECK-INDEXED-RV64-NEXT:    li a3, 205
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_727
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_213
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_727: # %cond.load821
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 207
-; CHECK-INDEXED-RV64-NEXT:    li a3, 206
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_728
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_214
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_728: # %cond.load825
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 208
-; CHECK-INDEXED-RV64-NEXT:    li a3, 207
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_729
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_215
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_729: # %cond.load829
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 209
-; CHECK-INDEXED-RV64-NEXT:    li a3, 208
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_730
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_216
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_730: # %cond.load833
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 210
-; CHECK-INDEXED-RV64-NEXT:    li a3, 209
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_731
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_217
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_731: # %cond.load837
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 211
-; CHECK-INDEXED-RV64-NEXT:    li a3, 210
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_732
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_218
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_732: # %cond.load841
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 212
-; CHECK-INDEXED-RV64-NEXT:    li a3, 211
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_733
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_219
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_733: # %cond.load845
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 213
-; CHECK-INDEXED-RV64-NEXT:    li a3, 212
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_734
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_220
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_734: # %cond.load849
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 214
-; CHECK-INDEXED-RV64-NEXT:    li a3, 213
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_735
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_221
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_735: # %cond.load853
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 215
-; CHECK-INDEXED-RV64-NEXT:    li a3, 214
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_736
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_222
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_736: # %cond.load857
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 216
-; CHECK-INDEXED-RV64-NEXT:    li a3, 215
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_737
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_223
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_737: # %cond.load861
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 217
-; CHECK-INDEXED-RV64-NEXT:    li a3, 216
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_738
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_224
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_738: # %cond.load865
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 218
-; CHECK-INDEXED-RV64-NEXT:    li a3, 217
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_739
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_225
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_739: # %cond.load869
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 219
-; CHECK-INDEXED-RV64-NEXT:    li a3, 218
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_740
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_226
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_740: # %cond.load873
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 220
-; CHECK-INDEXED-RV64-NEXT:    li a3, 219
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_741
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_227
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_741: # %cond.load877
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 221
-; CHECK-INDEXED-RV64-NEXT:    li a3, 220
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_742
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_228
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_742: # %cond.load881
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 222
-; CHECK-INDEXED-RV64-NEXT:    li a3, 221
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_743
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_229
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_743: # %cond.load885
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 223
-; CHECK-INDEXED-RV64-NEXT:    li a3, 222
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_744
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_230
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_744: # %cond.load889
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 224
-; CHECK-INDEXED-RV64-NEXT:    li a3, 223
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_745
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_231
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_745: # %cond.load893
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 225
-; CHECK-INDEXED-RV64-NEXT:    li a3, 224
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_746
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_232
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_746: # %cond.load897
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 226
-; CHECK-INDEXED-RV64-NEXT:    li a3, 225
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_747
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_233
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_747: # %cond.load901
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 227
-; CHECK-INDEXED-RV64-NEXT:    li a3, 226
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_748
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_234
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_748: # %cond.load905
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 228
-; CHECK-INDEXED-RV64-NEXT:    li a3, 227
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_749
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_235
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_749: # %cond.load909
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 229
-; CHECK-INDEXED-RV64-NEXT:    li a3, 228
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_750
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_236
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_750: # %cond.load913
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 230
-; CHECK-INDEXED-RV64-NEXT:    li a3, 229
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_751
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_237
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_751: # %cond.load917
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 231
-; CHECK-INDEXED-RV64-NEXT:    li a3, 230
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_752
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_238
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_752: # %cond.load921
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 232
-; CHECK-INDEXED-RV64-NEXT:    li a3, 231
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_753
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_239
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_753: # %cond.load925
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 233
-; CHECK-INDEXED-RV64-NEXT:    li a3, 232
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_754
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_240
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_754: # %cond.load929
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 234
-; CHECK-INDEXED-RV64-NEXT:    li a3, 233
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_755
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_241
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_755: # %cond.load933
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 235
-; CHECK-INDEXED-RV64-NEXT:    li a3, 234
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_756
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_242
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_756: # %cond.load937
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 236
-; CHECK-INDEXED-RV64-NEXT:    li a3, 235
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_757
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_243
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_757: # %cond.load941
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 237
-; CHECK-INDEXED-RV64-NEXT:    li a3, 236
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_758
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_244
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_758: # %cond.load945
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 238
-; CHECK-INDEXED-RV64-NEXT:    li a3, 237
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_759
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_245
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_759: # %cond.load949
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 239
-; CHECK-INDEXED-RV64-NEXT:    li a3, 238
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_760
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_246
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_760: # %cond.load953
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 240
-; CHECK-INDEXED-RV64-NEXT:    li a3, 239
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_761
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_247
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_761: # %cond.load957
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 241
-; CHECK-INDEXED-RV64-NEXT:    li a3, 240
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_762
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_248
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_762: # %cond.load961
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 242
-; CHECK-INDEXED-RV64-NEXT:    li a3, 241
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_763
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_249
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_763: # %cond.load965
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 243
-; CHECK-INDEXED-RV64-NEXT:    li a3, 242
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_764
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_250
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_764: # %cond.load969
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 244
-; CHECK-INDEXED-RV64-NEXT:    li a3, 243
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_765
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_251
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_765: # %cond.load973
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 245
-; CHECK-INDEXED-RV64-NEXT:    li a3, 244
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_766
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_252
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_766: # %cond.load977
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 246
-; CHECK-INDEXED-RV64-NEXT:    li a3, 245
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_767
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_253
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_767: # %cond.load981
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 247
-; CHECK-INDEXED-RV64-NEXT:    li a3, 246
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_768
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_254
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_768: # %cond.load985
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 248
-; CHECK-INDEXED-RV64-NEXT:    li a3, 247
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_769
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_255
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_769: # %cond.load989
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 249
-; CHECK-INDEXED-RV64-NEXT:    li a3, 248
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_770
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_256
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_770: # %cond.load993
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 250
-; CHECK-INDEXED-RV64-NEXT:    li a3, 249
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_771
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_257
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_771: # %cond.load997
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 251
-; CHECK-INDEXED-RV64-NEXT:    li a3, 250
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_772
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_258
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_772: # %cond.load1001
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 252
-; CHECK-INDEXED-RV64-NEXT:    li a3, 251
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_773
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_259
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_773: # %cond.load1005
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a2, 253
-; CHECK-INDEXED-RV64-NEXT:    li a3, 252
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1028
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_260
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1028: # %cond.load1005
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_261
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_774: # %cond.load1017
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    li a1, 256
-; CHECK-INDEXED-RV64-NEXT:    li a3, 255
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    vmv4r.v v24, v8
-; CHECK-INDEXED-RV64-NEXT:    vmv8r.v v8, v24
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_775
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_265
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_775: # %cond.load1021
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 257
-; CHECK-INDEXED-RV64-NEXT:    li a3, 256
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_776
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_266
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_776: # %cond.load1025
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 258
-; CHECK-INDEXED-RV64-NEXT:    li a3, 257
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_777
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_267
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_777: # %cond.load1029
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 259
-; CHECK-INDEXED-RV64-NEXT:    li a3, 258
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_778
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_268
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_778: # %cond.load1033
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 260
-; CHECK-INDEXED-RV64-NEXT:    li a3, 259
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_779
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_269
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_779: # %cond.load1037
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 261
-; CHECK-INDEXED-RV64-NEXT:    li a3, 260
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_780
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_270
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_780: # %cond.load1041
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 262
-; CHECK-INDEXED-RV64-NEXT:    li a3, 261
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_781
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_271
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_781: # %cond.load1045
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 263
-; CHECK-INDEXED-RV64-NEXT:    li a3, 262
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_782
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_272
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_782: # %cond.load1049
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 264
-; CHECK-INDEXED-RV64-NEXT:    li a3, 263
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_783
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_273
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_783: # %cond.load1053
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 265
-; CHECK-INDEXED-RV64-NEXT:    li a3, 264
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_784
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_274
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_784: # %cond.load1057
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 266
-; CHECK-INDEXED-RV64-NEXT:    li a3, 265
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_785
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_275
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_785: # %cond.load1061
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 267
-; CHECK-INDEXED-RV64-NEXT:    li a3, 266
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_786
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_276
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_786: # %cond.load1065
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 268
-; CHECK-INDEXED-RV64-NEXT:    li a3, 267
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_787
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_277
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_787: # %cond.load1069
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 269
-; CHECK-INDEXED-RV64-NEXT:    li a3, 268
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_788
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_278
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_788: # %cond.load1073
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 270
-; CHECK-INDEXED-RV64-NEXT:    li a3, 269
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_789
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_279
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_789: # %cond.load1077
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 271
-; CHECK-INDEXED-RV64-NEXT:    li a3, 270
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_790
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_280
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_790: # %cond.load1081
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 272
-; CHECK-INDEXED-RV64-NEXT:    li a3, 271
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_791
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_281
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_791: # %cond.load1085
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 273
-; CHECK-INDEXED-RV64-NEXT:    li a3, 272
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_792
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_282
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_792: # %cond.load1089
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 274
-; CHECK-INDEXED-RV64-NEXT:    li a3, 273
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_793
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_283
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_793: # %cond.load1093
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 275
-; CHECK-INDEXED-RV64-NEXT:    li a3, 274
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_794
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_284
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_794: # %cond.load1097
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 276
-; CHECK-INDEXED-RV64-NEXT:    li a3, 275
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_795
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_285
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_795: # %cond.load1101
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 277
-; CHECK-INDEXED-RV64-NEXT:    li a3, 276
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_796
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_286
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_796: # %cond.load1105
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 278
-; CHECK-INDEXED-RV64-NEXT:    li a3, 277
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_797
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_287
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_797: # %cond.load1109
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 279
-; CHECK-INDEXED-RV64-NEXT:    li a3, 278
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_798
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_288
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_798: # %cond.load1113
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 280
-; CHECK-INDEXED-RV64-NEXT:    li a3, 279
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_799
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_289
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_799: # %cond.load1117
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 281
-; CHECK-INDEXED-RV64-NEXT:    li a3, 280
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_800
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_290
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_800: # %cond.load1121
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 282
-; CHECK-INDEXED-RV64-NEXT:    li a3, 281
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_801
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_291
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_801: # %cond.load1125
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 283
-; CHECK-INDEXED-RV64-NEXT:    li a3, 282
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_802
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_292
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_802: # %cond.load1129
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 284
-; CHECK-INDEXED-RV64-NEXT:    li a3, 283
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_803
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_293
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_803: # %cond.load1133
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 285
-; CHECK-INDEXED-RV64-NEXT:    li a3, 284
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_804
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_294
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_804: # %cond.load1137
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 286
-; CHECK-INDEXED-RV64-NEXT:    li a3, 285
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_805
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_295
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_805: # %cond.load1141
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 287
-; CHECK-INDEXED-RV64-NEXT:    li a3, 286
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_806
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_296
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_806: # %cond.load1145
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 288
-; CHECK-INDEXED-RV64-NEXT:    li a3, 287
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_807
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_297
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_807: # %cond.load1149
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 289
-; CHECK-INDEXED-RV64-NEXT:    li a3, 288
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_808
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_298
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_808: # %cond.load1153
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 290
-; CHECK-INDEXED-RV64-NEXT:    li a3, 289
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_809
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_299
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_809: # %cond.load1157
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 291
-; CHECK-INDEXED-RV64-NEXT:    li a3, 290
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_810
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_300
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_810: # %cond.load1161
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 292
-; CHECK-INDEXED-RV64-NEXT:    li a3, 291
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_811
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_301
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_811: # %cond.load1165
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 293
-; CHECK-INDEXED-RV64-NEXT:    li a3, 292
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_812
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_302
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_812: # %cond.load1169
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 294
-; CHECK-INDEXED-RV64-NEXT:    li a3, 293
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_813
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_303
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_813: # %cond.load1173
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 295
-; CHECK-INDEXED-RV64-NEXT:    li a3, 294
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_814
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_304
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_814: # %cond.load1177
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 296
-; CHECK-INDEXED-RV64-NEXT:    li a3, 295
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_815
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_305
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_815: # %cond.load1181
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 297
-; CHECK-INDEXED-RV64-NEXT:    li a3, 296
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_816
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_306
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_816: # %cond.load1185
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 298
-; CHECK-INDEXED-RV64-NEXT:    li a3, 297
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_817
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_307
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_817: # %cond.load1189
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 299
-; CHECK-INDEXED-RV64-NEXT:    li a3, 298
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_818
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_308
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_818: # %cond.load1193
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 300
-; CHECK-INDEXED-RV64-NEXT:    li a3, 299
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_819
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_309
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_819: # %cond.load1197
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 301
-; CHECK-INDEXED-RV64-NEXT:    li a3, 300
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_820
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_310
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_820: # %cond.load1201
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 302
-; CHECK-INDEXED-RV64-NEXT:    li a3, 301
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_821
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_311
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_821: # %cond.load1205
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 303
-; CHECK-INDEXED-RV64-NEXT:    li a3, 302
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_822
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_312
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_822: # %cond.load1209
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 304
-; CHECK-INDEXED-RV64-NEXT:    li a3, 303
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_823
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_313
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_823: # %cond.load1213
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 305
-; CHECK-INDEXED-RV64-NEXT:    li a3, 304
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_824
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_314
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_824: # %cond.load1217
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 306
-; CHECK-INDEXED-RV64-NEXT:    li a3, 305
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_825
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_315
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_825: # %cond.load1221
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 307
-; CHECK-INDEXED-RV64-NEXT:    li a3, 306
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_826
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_316
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_826: # %cond.load1225
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 308
-; CHECK-INDEXED-RV64-NEXT:    li a3, 307
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_827
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_317
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_827: # %cond.load1229
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 309
-; CHECK-INDEXED-RV64-NEXT:    li a3, 308
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_828
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_318
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_828: # %cond.load1233
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 310
-; CHECK-INDEXED-RV64-NEXT:    li a3, 309
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_829
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_319
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_829: # %cond.load1237
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 311
-; CHECK-INDEXED-RV64-NEXT:    li a3, 310
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_830
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_320
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_830: # %cond.load1241
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 312
-; CHECK-INDEXED-RV64-NEXT:    li a3, 311
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_831
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_321
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_831: # %cond.load1245
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 313
-; CHECK-INDEXED-RV64-NEXT:    li a3, 312
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_832
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_322
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_832: # %cond.load1249
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 314
-; CHECK-INDEXED-RV64-NEXT:    li a3, 313
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_833
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_323
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_833: # %cond.load1253
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 315
-; CHECK-INDEXED-RV64-NEXT:    li a3, 314
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_834
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_324
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_834: # %cond.load1257
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 316
-; CHECK-INDEXED-RV64-NEXT:    li a3, 315
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_835
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_325
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_835: # %cond.load1261
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 317
-; CHECK-INDEXED-RV64-NEXT:    li a3, 316
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1029
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_326
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1029: # %cond.load1261
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_327
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_836: # %cond.load1273
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 320
-; CHECK-INDEXED-RV64-NEXT:    li a3, 319
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_837
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_331
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_837: # %cond.load1277
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 321
-; CHECK-INDEXED-RV64-NEXT:    li a3, 320
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_838
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_332
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_838: # %cond.load1281
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 322
-; CHECK-INDEXED-RV64-NEXT:    li a3, 321
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_839
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_333
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_839: # %cond.load1285
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 323
-; CHECK-INDEXED-RV64-NEXT:    li a3, 322
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_840
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_334
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_840: # %cond.load1289
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 324
-; CHECK-INDEXED-RV64-NEXT:    li a3, 323
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_841
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_335
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_841: # %cond.load1293
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 325
-; CHECK-INDEXED-RV64-NEXT:    li a3, 324
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_842
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_336
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_842: # %cond.load1297
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 326
-; CHECK-INDEXED-RV64-NEXT:    li a3, 325
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_843
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_337
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_843: # %cond.load1301
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 327
-; CHECK-INDEXED-RV64-NEXT:    li a3, 326
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_844
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_338
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_844: # %cond.load1305
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 328
-; CHECK-INDEXED-RV64-NEXT:    li a3, 327
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_845
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_339
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_845: # %cond.load1309
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 329
-; CHECK-INDEXED-RV64-NEXT:    li a3, 328
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_846
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_340
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_846: # %cond.load1313
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 330
-; CHECK-INDEXED-RV64-NEXT:    li a3, 329
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_847
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_341
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_847: # %cond.load1317
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 331
-; CHECK-INDEXED-RV64-NEXT:    li a3, 330
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_848
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_342
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_848: # %cond.load1321
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 332
-; CHECK-INDEXED-RV64-NEXT:    li a3, 331
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_849
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_343
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_849: # %cond.load1325
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 333
-; CHECK-INDEXED-RV64-NEXT:    li a3, 332
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_850
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_344
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_850: # %cond.load1329
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 334
-; CHECK-INDEXED-RV64-NEXT:    li a3, 333
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_851
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_345
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_851: # %cond.load1333
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 335
-; CHECK-INDEXED-RV64-NEXT:    li a3, 334
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_852
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_346
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_852: # %cond.load1337
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 336
-; CHECK-INDEXED-RV64-NEXT:    li a3, 335
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_853
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_347
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_853: # %cond.load1341
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 337
-; CHECK-INDEXED-RV64-NEXT:    li a3, 336
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_854
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_348
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_854: # %cond.load1345
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 338
-; CHECK-INDEXED-RV64-NEXT:    li a3, 337
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_855
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_349
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_855: # %cond.load1349
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 339
-; CHECK-INDEXED-RV64-NEXT:    li a3, 338
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_856
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_350
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_856: # %cond.load1353
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 340
-; CHECK-INDEXED-RV64-NEXT:    li a3, 339
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_857
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_351
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_857: # %cond.load1357
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 341
-; CHECK-INDEXED-RV64-NEXT:    li a3, 340
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_858
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_352
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_858: # %cond.load1361
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 342
-; CHECK-INDEXED-RV64-NEXT:    li a3, 341
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_859
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_353
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_859: # %cond.load1365
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 343
-; CHECK-INDEXED-RV64-NEXT:    li a3, 342
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_860
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_354
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_860: # %cond.load1369
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 344
-; CHECK-INDEXED-RV64-NEXT:    li a3, 343
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_861
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_355
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_861: # %cond.load1373
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 345
-; CHECK-INDEXED-RV64-NEXT:    li a3, 344
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_862
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_356
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_862: # %cond.load1377
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 346
-; CHECK-INDEXED-RV64-NEXT:    li a3, 345
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_863
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_357
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_863: # %cond.load1381
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 347
-; CHECK-INDEXED-RV64-NEXT:    li a3, 346
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_864
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_358
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_864: # %cond.load1385
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 348
-; CHECK-INDEXED-RV64-NEXT:    li a3, 347
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_865
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_359
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_865: # %cond.load1389
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 349
-; CHECK-INDEXED-RV64-NEXT:    li a3, 348
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_866
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_360
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_866: # %cond.load1393
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 350
-; CHECK-INDEXED-RV64-NEXT:    li a3, 349
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_867
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_361
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_867: # %cond.load1397
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 351
-; CHECK-INDEXED-RV64-NEXT:    li a3, 350
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_868
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_362
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_868: # %cond.load1401
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 352
-; CHECK-INDEXED-RV64-NEXT:    li a3, 351
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_869
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_363
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_869: # %cond.load1405
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 353
-; CHECK-INDEXED-RV64-NEXT:    li a3, 352
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_870
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_364
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_870: # %cond.load1409
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 354
-; CHECK-INDEXED-RV64-NEXT:    li a3, 353
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_871
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_365
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_871: # %cond.load1413
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 355
-; CHECK-INDEXED-RV64-NEXT:    li a3, 354
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_872
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_366
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_872: # %cond.load1417
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 356
-; CHECK-INDEXED-RV64-NEXT:    li a3, 355
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_873
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_367
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_873: # %cond.load1421
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 357
-; CHECK-INDEXED-RV64-NEXT:    li a3, 356
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_874
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_368
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_874: # %cond.load1425
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 358
-; CHECK-INDEXED-RV64-NEXT:    li a3, 357
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_875
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_369
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_875: # %cond.load1429
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 359
-; CHECK-INDEXED-RV64-NEXT:    li a3, 358
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_876
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_370
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_876: # %cond.load1433
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 360
-; CHECK-INDEXED-RV64-NEXT:    li a3, 359
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_877
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_371
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_877: # %cond.load1437
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 361
-; CHECK-INDEXED-RV64-NEXT:    li a3, 360
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_878
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_372
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_878: # %cond.load1441
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 362
-; CHECK-INDEXED-RV64-NEXT:    li a3, 361
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_879
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_373
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_879: # %cond.load1445
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 363
-; CHECK-INDEXED-RV64-NEXT:    li a3, 362
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_880
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_374
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_880: # %cond.load1449
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 364
-; CHECK-INDEXED-RV64-NEXT:    li a3, 363
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_881
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_375
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_881: # %cond.load1453
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 365
-; CHECK-INDEXED-RV64-NEXT:    li a3, 364
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_882
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_376
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_882: # %cond.load1457
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 366
-; CHECK-INDEXED-RV64-NEXT:    li a3, 365
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_883
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_377
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_883: # %cond.load1461
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 367
-; CHECK-INDEXED-RV64-NEXT:    li a3, 366
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_884
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_378
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_884: # %cond.load1465
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 368
-; CHECK-INDEXED-RV64-NEXT:    li a3, 367
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_885
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_379
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_885: # %cond.load1469
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 369
-; CHECK-INDEXED-RV64-NEXT:    li a3, 368
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_886
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_380
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_886: # %cond.load1473
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 370
-; CHECK-INDEXED-RV64-NEXT:    li a3, 369
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_887
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_381
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_887: # %cond.load1477
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 371
-; CHECK-INDEXED-RV64-NEXT:    li a3, 370
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_888
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_382
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_888: # %cond.load1481
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 372
-; CHECK-INDEXED-RV64-NEXT:    li a3, 371
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_889
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_383
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_889: # %cond.load1485
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 373
-; CHECK-INDEXED-RV64-NEXT:    li a3, 372
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_890
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_384
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_890: # %cond.load1489
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 374
-; CHECK-INDEXED-RV64-NEXT:    li a3, 373
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_891
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_385
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_891: # %cond.load1493
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 375
-; CHECK-INDEXED-RV64-NEXT:    li a3, 374
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_892
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_386
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_892: # %cond.load1497
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 376
-; CHECK-INDEXED-RV64-NEXT:    li a3, 375
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_893
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_387
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_893: # %cond.load1501
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 377
-; CHECK-INDEXED-RV64-NEXT:    li a3, 376
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_894
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_388
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_894: # %cond.load1505
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 378
-; CHECK-INDEXED-RV64-NEXT:    li a3, 377
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_895
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_389
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_895: # %cond.load1509
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 379
-; CHECK-INDEXED-RV64-NEXT:    li a3, 378
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_896
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_390
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_896: # %cond.load1513
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 380
-; CHECK-INDEXED-RV64-NEXT:    li a3, 379
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_897
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_391
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_897: # %cond.load1517
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 381
-; CHECK-INDEXED-RV64-NEXT:    li a3, 380
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a2, .LBB61_1030
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_392
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1030: # %cond.load1517
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_393
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_898: # %cond.load1529
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 384
-; CHECK-INDEXED-RV64-NEXT:    li a3, 383
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_899
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_397
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_899: # %cond.load1533
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 385
-; CHECK-INDEXED-RV64-NEXT:    li a3, 384
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_900
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_398
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_900: # %cond.load1537
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 386
-; CHECK-INDEXED-RV64-NEXT:    li a3, 385
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_901
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_399
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_901: # %cond.load1541
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 387
-; CHECK-INDEXED-RV64-NEXT:    li a3, 386
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_902
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_400
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_902: # %cond.load1545
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 388
-; CHECK-INDEXED-RV64-NEXT:    li a3, 387
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_903
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_401
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_903: # %cond.load1549
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 389
-; CHECK-INDEXED-RV64-NEXT:    li a3, 388
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_904
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_402
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_904: # %cond.load1553
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 390
-; CHECK-INDEXED-RV64-NEXT:    li a3, 389
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_905
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_403
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_905: # %cond.load1557
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 391
-; CHECK-INDEXED-RV64-NEXT:    li a3, 390
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_906
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_404
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_906: # %cond.load1561
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 392
-; CHECK-INDEXED-RV64-NEXT:    li a3, 391
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_907
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_405
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_907: # %cond.load1565
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 393
-; CHECK-INDEXED-RV64-NEXT:    li a3, 392
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_908
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_406
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_908: # %cond.load1569
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 394
-; CHECK-INDEXED-RV64-NEXT:    li a3, 393
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a1, a2, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a1, .LBB61_909
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_407
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_909: # %cond.load1573
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 395
-; CHECK-INDEXED-RV64-NEXT:    li a3, 394
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_910
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_408
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_910: # %cond.load1577
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 396
-; CHECK-INDEXED-RV64-NEXT:    li a3, 395
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_911
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_409
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_911: # %cond.load1581
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 397
-; CHECK-INDEXED-RV64-NEXT:    li a3, 396
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_912
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_410
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_912: # %cond.load1585
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 398
-; CHECK-INDEXED-RV64-NEXT:    li a3, 397
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_913
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_411
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_913: # %cond.load1589
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 399
-; CHECK-INDEXED-RV64-NEXT:    li a3, 398
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_914
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_412
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_914: # %cond.load1593
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 400
-; CHECK-INDEXED-RV64-NEXT:    li a3, 399
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_915
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_413
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_915: # %cond.load1597
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 401
-; CHECK-INDEXED-RV64-NEXT:    li a3, 400
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_916
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_414
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_916: # %cond.load1601
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 402
-; CHECK-INDEXED-RV64-NEXT:    li a3, 401
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_917
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_415
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_917: # %cond.load1605
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 403
-; CHECK-INDEXED-RV64-NEXT:    li a3, 402
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_918
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_416
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_918: # %cond.load1609
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 404
-; CHECK-INDEXED-RV64-NEXT:    li a3, 403
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_919
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_417
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_919: # %cond.load1613
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 405
-; CHECK-INDEXED-RV64-NEXT:    li a3, 404
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_920
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_418
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_920: # %cond.load1617
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 406
-; CHECK-INDEXED-RV64-NEXT:    li a3, 405
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_921
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_419
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_921: # %cond.load1621
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 407
-; CHECK-INDEXED-RV64-NEXT:    li a3, 406
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_922
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_420
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_922: # %cond.load1625
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 408
-; CHECK-INDEXED-RV64-NEXT:    li a3, 407
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_923
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_421
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_923: # %cond.load1629
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 409
-; CHECK-INDEXED-RV64-NEXT:    li a3, 408
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_924
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_422
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_924: # %cond.load1633
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 410
-; CHECK-INDEXED-RV64-NEXT:    li a3, 409
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_925
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_423
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_925: # %cond.load1637
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 411
-; CHECK-INDEXED-RV64-NEXT:    li a3, 410
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_926
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_424
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_926: # %cond.load1641
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 412
-; CHECK-INDEXED-RV64-NEXT:    li a3, 411
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_927
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_425
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_927: # %cond.load1645
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 413
-; CHECK-INDEXED-RV64-NEXT:    li a3, 412
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_928
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_426
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_928: # %cond.load1649
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 414
-; CHECK-INDEXED-RV64-NEXT:    li a3, 413
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_929
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_427
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_929: # %cond.load1653
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 415
-; CHECK-INDEXED-RV64-NEXT:    li a3, 414
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_930
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_428
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_930: # %cond.load1657
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 416
-; CHECK-INDEXED-RV64-NEXT:    li a3, 415
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_931
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_429
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_931: # %cond.load1661
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 417
-; CHECK-INDEXED-RV64-NEXT:    li a3, 416
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_932
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_430
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_932: # %cond.load1665
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 418
-; CHECK-INDEXED-RV64-NEXT:    li a3, 417
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_933
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_431
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_933: # %cond.load1669
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 419
-; CHECK-INDEXED-RV64-NEXT:    li a3, 418
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_934
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_432
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_934: # %cond.load1673
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 420
-; CHECK-INDEXED-RV64-NEXT:    li a3, 419
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_935
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_433
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_935: # %cond.load1677
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 421
-; CHECK-INDEXED-RV64-NEXT:    li a3, 420
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_936
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_434
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_936: # %cond.load1681
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 422
-; CHECK-INDEXED-RV64-NEXT:    li a3, 421
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_937
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_435
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_937: # %cond.load1685
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 423
-; CHECK-INDEXED-RV64-NEXT:    li a3, 422
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_938
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_436
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_938: # %cond.load1689
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 424
-; CHECK-INDEXED-RV64-NEXT:    li a3, 423
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_939
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_437
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_939: # %cond.load1693
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 425
-; CHECK-INDEXED-RV64-NEXT:    li a3, 424
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_940
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_438
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_940: # %cond.load1697
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 426
-; CHECK-INDEXED-RV64-NEXT:    li a3, 425
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_941
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_439
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_941: # %cond.load1701
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 427
-; CHECK-INDEXED-RV64-NEXT:    li a3, 426
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_942
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_440
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_942: # %cond.load1705
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 428
-; CHECK-INDEXED-RV64-NEXT:    li a3, 427
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_943
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_441
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_943: # %cond.load1709
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 429
-; CHECK-INDEXED-RV64-NEXT:    li a3, 428
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_944
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_442
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_944: # %cond.load1713
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 430
-; CHECK-INDEXED-RV64-NEXT:    li a3, 429
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_945
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_443
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_945: # %cond.load1717
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 431
-; CHECK-INDEXED-RV64-NEXT:    li a3, 430
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_946
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_444
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_946: # %cond.load1721
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 432
-; CHECK-INDEXED-RV64-NEXT:    li a3, 431
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_947
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_445
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_947: # %cond.load1725
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 433
-; CHECK-INDEXED-RV64-NEXT:    li a3, 432
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_948
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_446
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_948: # %cond.load1729
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 434
-; CHECK-INDEXED-RV64-NEXT:    li a3, 433
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_949
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_447
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_949: # %cond.load1733
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 435
-; CHECK-INDEXED-RV64-NEXT:    li a3, 434
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_950
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_448
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_950: # %cond.load1737
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 436
-; CHECK-INDEXED-RV64-NEXT:    li a3, 435
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_951
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_449
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_951: # %cond.load1741
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 437
-; CHECK-INDEXED-RV64-NEXT:    li a3, 436
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_952
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_450
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_952: # %cond.load1745
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 438
-; CHECK-INDEXED-RV64-NEXT:    li a3, 437
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_953
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_451
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_953: # %cond.load1749
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 439
-; CHECK-INDEXED-RV64-NEXT:    li a3, 438
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_954
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_452
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_954: # %cond.load1753
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 440
-; CHECK-INDEXED-RV64-NEXT:    li a3, 439
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_955
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_453
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_955: # %cond.load1757
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 441
-; CHECK-INDEXED-RV64-NEXT:    li a3, 440
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_956
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_454
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_956: # %cond.load1761
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 442
-; CHECK-INDEXED-RV64-NEXT:    li a3, 441
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_957
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_455
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_957: # %cond.load1765
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 443
-; CHECK-INDEXED-RV64-NEXT:    li a3, 442
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_958
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_456
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_958: # %cond.load1769
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 444
-; CHECK-INDEXED-RV64-NEXT:    li a3, 443
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_959
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_457
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_959: # %cond.load1773
-; CHECK-INDEXED-RV64-NEXT:    lbu a1, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a1
-; CHECK-INDEXED-RV64-NEXT:    li a1, 445
-; CHECK-INDEXED-RV64-NEXT:    li a3, 444
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a1, a2, 2
-; CHECK-INDEXED-RV64-NEXT:    bgez a1, .LBB61_1031
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_458
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1031: # %cond.load1773
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_459
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_960: # %cond.load1785
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 448
-; CHECK-INDEXED-RV64-NEXT:    li a3, 447
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_961
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_463
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_961: # %cond.load1789
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 449
-; CHECK-INDEXED-RV64-NEXT:    li a3, 448
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_962
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_464
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_962: # %cond.load1793
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 450
-; CHECK-INDEXED-RV64-NEXT:    li a3, 449
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_963
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_465
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_963: # %cond.load1797
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 451
-; CHECK-INDEXED-RV64-NEXT:    li a3, 450
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_964
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_466
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_964: # %cond.load1801
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 452
-; CHECK-INDEXED-RV64-NEXT:    li a3, 451
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_965
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_467
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_965: # %cond.load1805
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 453
-; CHECK-INDEXED-RV64-NEXT:    li a3, 452
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_966
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_468
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_966: # %cond.load1809
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 454
-; CHECK-INDEXED-RV64-NEXT:    li a3, 453
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 64
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_967
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_469
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_967: # %cond.load1813
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 455
-; CHECK-INDEXED-RV64-NEXT:    li a3, 454
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 128
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_968
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_470
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_968: # %cond.load1817
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 456
-; CHECK-INDEXED-RV64-NEXT:    li a3, 455
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 256
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_969
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_471
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_969: # %cond.load1821
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 457
-; CHECK-INDEXED-RV64-NEXT:    li a3, 456
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 512
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_970
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_472
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_970: # %cond.load1825
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 458
-; CHECK-INDEXED-RV64-NEXT:    li a3, 457
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    andi a2, a1, 1024
-; CHECK-INDEXED-RV64-NEXT:    bnez a2, .LBB61_971
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_473
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_971: # %cond.load1829
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 459
-; CHECK-INDEXED-RV64-NEXT:    li a3, 458
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 52
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_972
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_474
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_972: # %cond.load1833
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 460
-; CHECK-INDEXED-RV64-NEXT:    li a3, 459
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 51
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_973
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_475
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_973: # %cond.load1837
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 461
-; CHECK-INDEXED-RV64-NEXT:    li a3, 460
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 50
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_974
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_476
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_974: # %cond.load1841
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 462
-; CHECK-INDEXED-RV64-NEXT:    li a3, 461
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 49
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_975
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_477
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_975: # %cond.load1845
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 463
-; CHECK-INDEXED-RV64-NEXT:    li a3, 462
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 48
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_976
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_478
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_976: # %cond.load1849
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 464
-; CHECK-INDEXED-RV64-NEXT:    li a3, 463
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 47
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_977
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_479
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_977: # %cond.load1853
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 465
-; CHECK-INDEXED-RV64-NEXT:    li a3, 464
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 46
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_978
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_480
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_978: # %cond.load1857
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 466
-; CHECK-INDEXED-RV64-NEXT:    li a3, 465
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 45
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_979
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_481
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_979: # %cond.load1861
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 467
-; CHECK-INDEXED-RV64-NEXT:    li a3, 466
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 44
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_980
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_482
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_980: # %cond.load1865
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 468
-; CHECK-INDEXED-RV64-NEXT:    li a3, 467
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 43
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_981
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_483
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_981: # %cond.load1869
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 469
-; CHECK-INDEXED-RV64-NEXT:    li a3, 468
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 42
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_982
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_484
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_982: # %cond.load1873
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 470
-; CHECK-INDEXED-RV64-NEXT:    li a3, 469
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 41
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_983
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_485
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_983: # %cond.load1877
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 471
-; CHECK-INDEXED-RV64-NEXT:    li a3, 470
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 40
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_984
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_486
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_984: # %cond.load1881
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 472
-; CHECK-INDEXED-RV64-NEXT:    li a3, 471
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 39
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_985
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_487
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_985: # %cond.load1885
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 473
-; CHECK-INDEXED-RV64-NEXT:    li a3, 472
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 38
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_986
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_488
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_986: # %cond.load1889
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 474
-; CHECK-INDEXED-RV64-NEXT:    li a3, 473
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 37
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_987
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_489
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_987: # %cond.load1893
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 475
-; CHECK-INDEXED-RV64-NEXT:    li a3, 474
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 36
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_988
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_490
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_988: # %cond.load1897
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 476
-; CHECK-INDEXED-RV64-NEXT:    li a3, 475
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 35
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_989
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_491
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_989: # %cond.load1901
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 477
-; CHECK-INDEXED-RV64-NEXT:    li a3, 476
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 34
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_990
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_492
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_990: # %cond.load1905
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 478
-; CHECK-INDEXED-RV64-NEXT:    li a3, 477
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 33
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_991
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_493
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_991: # %cond.load1909
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 479
-; CHECK-INDEXED-RV64-NEXT:    li a3, 478
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 32
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_992
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_494
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_992: # %cond.load1913
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 480
-; CHECK-INDEXED-RV64-NEXT:    li a3, 479
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 31
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_993
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_495
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_993: # %cond.load1917
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 481
-; CHECK-INDEXED-RV64-NEXT:    li a3, 480
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 30
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_994
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_496
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_994: # %cond.load1921
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 482
-; CHECK-INDEXED-RV64-NEXT:    li a3, 481
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 29
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_995
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_497
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_995: # %cond.load1925
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 483
-; CHECK-INDEXED-RV64-NEXT:    li a3, 482
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 28
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_996
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_498
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_996: # %cond.load1929
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 484
-; CHECK-INDEXED-RV64-NEXT:    li a3, 483
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 27
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_997
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_499
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_997: # %cond.load1933
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 485
-; CHECK-INDEXED-RV64-NEXT:    li a3, 484
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 26
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_998
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_500
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_998: # %cond.load1937
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 486
-; CHECK-INDEXED-RV64-NEXT:    li a3, 485
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 25
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_999
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_501
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_999: # %cond.load1941
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 487
-; CHECK-INDEXED-RV64-NEXT:    li a3, 486
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 24
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1000
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_502
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1000: # %cond.load1945
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 488
-; CHECK-INDEXED-RV64-NEXT:    li a3, 487
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 23
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1001
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_503
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1001: # %cond.load1949
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 489
-; CHECK-INDEXED-RV64-NEXT:    li a3, 488
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 22
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1002
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_504
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1002: # %cond.load1953
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 490
-; CHECK-INDEXED-RV64-NEXT:    li a3, 489
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 21
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1003
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_505
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1003: # %cond.load1957
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 491
-; CHECK-INDEXED-RV64-NEXT:    li a3, 490
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 20
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1004
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_506
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1004: # %cond.load1961
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 492
-; CHECK-INDEXED-RV64-NEXT:    li a3, 491
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 19
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1005
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_507
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1005: # %cond.load1965
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 493
-; CHECK-INDEXED-RV64-NEXT:    li a3, 492
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 18
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1006
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_508
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1006: # %cond.load1969
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 494
-; CHECK-INDEXED-RV64-NEXT:    li a3, 493
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 17
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1007
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_509
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1007: # %cond.load1973
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 495
-; CHECK-INDEXED-RV64-NEXT:    li a3, 494
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 16
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1008
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_510
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1008: # %cond.load1977
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 496
-; CHECK-INDEXED-RV64-NEXT:    li a3, 495
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 15
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1009
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_511
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1009: # %cond.load1981
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 497
-; CHECK-INDEXED-RV64-NEXT:    li a3, 496
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 14
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1010
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_512
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1010: # %cond.load1985
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 498
-; CHECK-INDEXED-RV64-NEXT:    li a3, 497
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 13
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1011
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_513
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1011: # %cond.load1989
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 499
-; CHECK-INDEXED-RV64-NEXT:    li a3, 498
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 12
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1012
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_514
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1012: # %cond.load1993
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 500
-; CHECK-INDEXED-RV64-NEXT:    li a3, 499
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 11
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1013
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_515
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1013: # %cond.load1997
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 501
-; CHECK-INDEXED-RV64-NEXT:    li a3, 500
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 10
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1014
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_516
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1014: # %cond.load2001
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 502
-; CHECK-INDEXED-RV64-NEXT:    li a3, 501
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 9
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1015
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_517
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1015: # %cond.load2005
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 503
-; CHECK-INDEXED-RV64-NEXT:    li a3, 502
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 8
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1016
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_518
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1016: # %cond.load2009
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 504
-; CHECK-INDEXED-RV64-NEXT:    li a3, 503
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 7
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1017
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_519
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1017: # %cond.load2013
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 505
-; CHECK-INDEXED-RV64-NEXT:    li a3, 504
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 6
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1018
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_520
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1018: # %cond.load2017
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 506
-; CHECK-INDEXED-RV64-NEXT:    li a3, 505
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 5
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1019
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_521
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1019: # %cond.load2021
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 507
-; CHECK-INDEXED-RV64-NEXT:    li a3, 506
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 4
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1020
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_522
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1020: # %cond.load2025
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 508
-; CHECK-INDEXED-RV64-NEXT:    li a3, 507
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 3
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1021
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_523
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1021: # %cond.load2029
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 509
-; CHECK-INDEXED-RV64-NEXT:    li a3, 508
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 2
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1022
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_524
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1022: # %cond.load2033
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 510
-; CHECK-INDEXED-RV64-NEXT:    li a3, 509
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    slli a2, a1, 1
-; CHECK-INDEXED-RV64-NEXT:    bltz a2, .LBB61_1023
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_525
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1023: # %cond.load2037
-; CHECK-INDEXED-RV64-NEXT:    lbu a2, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a3, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a2
-; CHECK-INDEXED-RV64-NEXT:    li a2, 511
-; CHECK-INDEXED-RV64-NEXT:    li a3, 510
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a3
-; CHECK-INDEXED-RV64-NEXT:    addi a0, a0, 1
-; CHECK-INDEXED-RV64-NEXT:    bltz a1, .LBB61_1024
-; CHECK-INDEXED-RV64-NEXT:    j .LBB61_526
-; CHECK-INDEXED-RV64-NEXT:  .LBB61_1024: # %cond.load2041
-; CHECK-INDEXED-RV64-NEXT:    lbu a0, 0(a0)
-; CHECK-INDEXED-RV64-NEXT:    li a1, 512
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    vmv.s.x v16, a0
-; CHECK-INDEXED-RV64-NEXT:    li a0, 511
-; CHECK-INDEXED-RV64-NEXT:    vslideup.vx v8, v16, a0
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-RV64-LABEL: test_expandload_v512i8_vlen512:
+; CHECK-RV64:       # %bb.0:
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v0
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_1
+; CHECK-RV64-NEXT:    j .LBB61_527
+; CHECK-RV64-NEXT:  .LBB61_1: # %else
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_2
+; CHECK-RV64-NEXT:    j .LBB61_528
+; CHECK-RV64-NEXT:  .LBB61_2: # %else2
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_3
+; CHECK-RV64-NEXT:    j .LBB61_529
+; CHECK-RV64-NEXT:  .LBB61_3: # %else6
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_4
+; CHECK-RV64-NEXT:    j .LBB61_530
+; CHECK-RV64-NEXT:  .LBB61_4: # %else10
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_5
+; CHECK-RV64-NEXT:    j .LBB61_531
+; CHECK-RV64-NEXT:  .LBB61_5: # %else14
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_6
+; CHECK-RV64-NEXT:    j .LBB61_532
+; CHECK-RV64-NEXT:  .LBB61_6: # %else18
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_7
+; CHECK-RV64-NEXT:    j .LBB61_533
+; CHECK-RV64-NEXT:  .LBB61_7: # %else22
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_8
+; CHECK-RV64-NEXT:    j .LBB61_534
+; CHECK-RV64-NEXT:  .LBB61_8: # %else26
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_9
+; CHECK-RV64-NEXT:    j .LBB61_535
+; CHECK-RV64-NEXT:  .LBB61_9: # %else30
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_10
+; CHECK-RV64-NEXT:    j .LBB61_536
+; CHECK-RV64-NEXT:  .LBB61_10: # %else34
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_11
+; CHECK-RV64-NEXT:    j .LBB61_537
+; CHECK-RV64-NEXT:  .LBB61_11: # %else38
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_12
+; CHECK-RV64-NEXT:    j .LBB61_538
+; CHECK-RV64-NEXT:  .LBB61_12: # %else42
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_13
+; CHECK-RV64-NEXT:    j .LBB61_539
+; CHECK-RV64-NEXT:  .LBB61_13: # %else46
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_14
+; CHECK-RV64-NEXT:    j .LBB61_540
+; CHECK-RV64-NEXT:  .LBB61_14: # %else50
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_15
+; CHECK-RV64-NEXT:    j .LBB61_541
+; CHECK-RV64-NEXT:  .LBB61_15: # %else54
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_16
+; CHECK-RV64-NEXT:    j .LBB61_542
+; CHECK-RV64-NEXT:  .LBB61_16: # %else58
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_17
+; CHECK-RV64-NEXT:    j .LBB61_543
+; CHECK-RV64-NEXT:  .LBB61_17: # %else62
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_18
+; CHECK-RV64-NEXT:    j .LBB61_544
+; CHECK-RV64-NEXT:  .LBB61_18: # %else66
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_19
+; CHECK-RV64-NEXT:    j .LBB61_545
+; CHECK-RV64-NEXT:  .LBB61_19: # %else70
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_20
+; CHECK-RV64-NEXT:    j .LBB61_546
+; CHECK-RV64-NEXT:  .LBB61_20: # %else74
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_21
+; CHECK-RV64-NEXT:    j .LBB61_547
+; CHECK-RV64-NEXT:  .LBB61_21: # %else78
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_22
+; CHECK-RV64-NEXT:    j .LBB61_548
+; CHECK-RV64-NEXT:  .LBB61_22: # %else82
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_23
+; CHECK-RV64-NEXT:    j .LBB61_549
+; CHECK-RV64-NEXT:  .LBB61_23: # %else86
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_24
+; CHECK-RV64-NEXT:    j .LBB61_550
+; CHECK-RV64-NEXT:  .LBB61_24: # %else90
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_25
+; CHECK-RV64-NEXT:    j .LBB61_551
+; CHECK-RV64-NEXT:  .LBB61_25: # %else94
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_26
+; CHECK-RV64-NEXT:    j .LBB61_552
+; CHECK-RV64-NEXT:  .LBB61_26: # %else98
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_27
+; CHECK-RV64-NEXT:    j .LBB61_553
+; CHECK-RV64-NEXT:  .LBB61_27: # %else102
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_28
+; CHECK-RV64-NEXT:    j .LBB61_554
+; CHECK-RV64-NEXT:  .LBB61_28: # %else106
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_29
+; CHECK-RV64-NEXT:    j .LBB61_555
+; CHECK-RV64-NEXT:  .LBB61_29: # %else110
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_30
+; CHECK-RV64-NEXT:    j .LBB61_556
+; CHECK-RV64-NEXT:  .LBB61_30: # %else114
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_31
+; CHECK-RV64-NEXT:    j .LBB61_557
+; CHECK-RV64-NEXT:  .LBB61_31: # %else118
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_32
+; CHECK-RV64-NEXT:    j .LBB61_558
+; CHECK-RV64-NEXT:  .LBB61_32: # %else122
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_33
+; CHECK-RV64-NEXT:    j .LBB61_559
+; CHECK-RV64-NEXT:  .LBB61_33: # %else126
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_34
+; CHECK-RV64-NEXT:    j .LBB61_560
+; CHECK-RV64-NEXT:  .LBB61_34: # %else130
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_35
+; CHECK-RV64-NEXT:    j .LBB61_561
+; CHECK-RV64-NEXT:  .LBB61_35: # %else134
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_36
+; CHECK-RV64-NEXT:    j .LBB61_562
+; CHECK-RV64-NEXT:  .LBB61_36: # %else138
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_37
+; CHECK-RV64-NEXT:    j .LBB61_563
+; CHECK-RV64-NEXT:  .LBB61_37: # %else142
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_38
+; CHECK-RV64-NEXT:    j .LBB61_564
+; CHECK-RV64-NEXT:  .LBB61_38: # %else146
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_39
+; CHECK-RV64-NEXT:    j .LBB61_565
+; CHECK-RV64-NEXT:  .LBB61_39: # %else150
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_40
+; CHECK-RV64-NEXT:    j .LBB61_566
+; CHECK-RV64-NEXT:  .LBB61_40: # %else154
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_41
+; CHECK-RV64-NEXT:    j .LBB61_567
+; CHECK-RV64-NEXT:  .LBB61_41: # %else158
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_42
+; CHECK-RV64-NEXT:    j .LBB61_568
+; CHECK-RV64-NEXT:  .LBB61_42: # %else162
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_43
+; CHECK-RV64-NEXT:    j .LBB61_569
+; CHECK-RV64-NEXT:  .LBB61_43: # %else166
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_44
+; CHECK-RV64-NEXT:    j .LBB61_570
+; CHECK-RV64-NEXT:  .LBB61_44: # %else170
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_45
+; CHECK-RV64-NEXT:    j .LBB61_571
+; CHECK-RV64-NEXT:  .LBB61_45: # %else174
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_46
+; CHECK-RV64-NEXT:    j .LBB61_572
+; CHECK-RV64-NEXT:  .LBB61_46: # %else178
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_47
+; CHECK-RV64-NEXT:    j .LBB61_573
+; CHECK-RV64-NEXT:  .LBB61_47: # %else182
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_48
+; CHECK-RV64-NEXT:    j .LBB61_574
+; CHECK-RV64-NEXT:  .LBB61_48: # %else186
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_49
+; CHECK-RV64-NEXT:    j .LBB61_575
+; CHECK-RV64-NEXT:  .LBB61_49: # %else190
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_50
+; CHECK-RV64-NEXT:    j .LBB61_576
+; CHECK-RV64-NEXT:  .LBB61_50: # %else194
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_51
+; CHECK-RV64-NEXT:    j .LBB61_577
+; CHECK-RV64-NEXT:  .LBB61_51: # %else198
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_52
+; CHECK-RV64-NEXT:    j .LBB61_578
+; CHECK-RV64-NEXT:  .LBB61_52: # %else202
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_53
+; CHECK-RV64-NEXT:    j .LBB61_579
+; CHECK-RV64-NEXT:  .LBB61_53: # %else206
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_54
+; CHECK-RV64-NEXT:    j .LBB61_580
+; CHECK-RV64-NEXT:  .LBB61_54: # %else210
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_55
+; CHECK-RV64-NEXT:    j .LBB61_581
+; CHECK-RV64-NEXT:  .LBB61_55: # %else214
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_56
+; CHECK-RV64-NEXT:    j .LBB61_582
+; CHECK-RV64-NEXT:  .LBB61_56: # %else218
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_57
+; CHECK-RV64-NEXT:    j .LBB61_583
+; CHECK-RV64-NEXT:  .LBB61_57: # %else222
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_58
+; CHECK-RV64-NEXT:    j .LBB61_584
+; CHECK-RV64-NEXT:  .LBB61_58: # %else226
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_59
+; CHECK-RV64-NEXT:    j .LBB61_585
+; CHECK-RV64-NEXT:  .LBB61_59: # %else230
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_60
+; CHECK-RV64-NEXT:    j .LBB61_586
+; CHECK-RV64-NEXT:  .LBB61_60: # %else234
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_61
+; CHECK-RV64-NEXT:    j .LBB61_587
+; CHECK-RV64-NEXT:  .LBB61_61: # %else238
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_63
+; CHECK-RV64-NEXT:  .LBB61_62: # %cond.load241
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 62
+; CHECK-RV64-NEXT:    li a3, 61
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:  .LBB61_63: # %else242
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 1
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_65
+; CHECK-RV64-NEXT:  # %bb.64: # %cond.load245
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v17, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 63
+; CHECK-RV64-NEXT:    li a3, 62
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v17, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_65: # %else246
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_66
+; CHECK-RV64-NEXT:    j .LBB61_588
+; CHECK-RV64-NEXT:  .LBB61_66: # %else250
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_67
+; CHECK-RV64-NEXT:    j .LBB61_589
+; CHECK-RV64-NEXT:  .LBB61_67: # %else254
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_68
+; CHECK-RV64-NEXT:    j .LBB61_590
+; CHECK-RV64-NEXT:  .LBB61_68: # %else258
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_69
+; CHECK-RV64-NEXT:    j .LBB61_591
+; CHECK-RV64-NEXT:  .LBB61_69: # %else262
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_70
+; CHECK-RV64-NEXT:    j .LBB61_592
+; CHECK-RV64-NEXT:  .LBB61_70: # %else266
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_71
+; CHECK-RV64-NEXT:    j .LBB61_593
+; CHECK-RV64-NEXT:  .LBB61_71: # %else270
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_72
+; CHECK-RV64-NEXT:    j .LBB61_594
+; CHECK-RV64-NEXT:  .LBB61_72: # %else274
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_73
+; CHECK-RV64-NEXT:    j .LBB61_595
+; CHECK-RV64-NEXT:  .LBB61_73: # %else278
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_74
+; CHECK-RV64-NEXT:    j .LBB61_596
+; CHECK-RV64-NEXT:  .LBB61_74: # %else282
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_75
+; CHECK-RV64-NEXT:    j .LBB61_597
+; CHECK-RV64-NEXT:  .LBB61_75: # %else286
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_76
+; CHECK-RV64-NEXT:    j .LBB61_598
+; CHECK-RV64-NEXT:  .LBB61_76: # %else290
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_77
+; CHECK-RV64-NEXT:    j .LBB61_599
+; CHECK-RV64-NEXT:  .LBB61_77: # %else294
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_78
+; CHECK-RV64-NEXT:    j .LBB61_600
+; CHECK-RV64-NEXT:  .LBB61_78: # %else298
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_79
+; CHECK-RV64-NEXT:    j .LBB61_601
+; CHECK-RV64-NEXT:  .LBB61_79: # %else302
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_80
+; CHECK-RV64-NEXT:    j .LBB61_602
+; CHECK-RV64-NEXT:  .LBB61_80: # %else306
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_81
+; CHECK-RV64-NEXT:    j .LBB61_603
+; CHECK-RV64-NEXT:  .LBB61_81: # %else310
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_82
+; CHECK-RV64-NEXT:    j .LBB61_604
+; CHECK-RV64-NEXT:  .LBB61_82: # %else314
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_83
+; CHECK-RV64-NEXT:    j .LBB61_605
+; CHECK-RV64-NEXT:  .LBB61_83: # %else318
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_84
+; CHECK-RV64-NEXT:    j .LBB61_606
+; CHECK-RV64-NEXT:  .LBB61_84: # %else322
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_85
+; CHECK-RV64-NEXT:    j .LBB61_607
+; CHECK-RV64-NEXT:  .LBB61_85: # %else326
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_86
+; CHECK-RV64-NEXT:    j .LBB61_608
+; CHECK-RV64-NEXT:  .LBB61_86: # %else330
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_87
+; CHECK-RV64-NEXT:    j .LBB61_609
+; CHECK-RV64-NEXT:  .LBB61_87: # %else334
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_88
+; CHECK-RV64-NEXT:    j .LBB61_610
+; CHECK-RV64-NEXT:  .LBB61_88: # %else338
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_89
+; CHECK-RV64-NEXT:    j .LBB61_611
+; CHECK-RV64-NEXT:  .LBB61_89: # %else342
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_90
+; CHECK-RV64-NEXT:    j .LBB61_612
+; CHECK-RV64-NEXT:  .LBB61_90: # %else346
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_91
+; CHECK-RV64-NEXT:    j .LBB61_613
+; CHECK-RV64-NEXT:  .LBB61_91: # %else350
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_92
+; CHECK-RV64-NEXT:    j .LBB61_614
+; CHECK-RV64-NEXT:  .LBB61_92: # %else354
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_93
+; CHECK-RV64-NEXT:    j .LBB61_615
+; CHECK-RV64-NEXT:  .LBB61_93: # %else358
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_94
+; CHECK-RV64-NEXT:    j .LBB61_616
+; CHECK-RV64-NEXT:  .LBB61_94: # %else362
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_95
+; CHECK-RV64-NEXT:    j .LBB61_617
+; CHECK-RV64-NEXT:  .LBB61_95: # %else366
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_96
+; CHECK-RV64-NEXT:    j .LBB61_618
+; CHECK-RV64-NEXT:  .LBB61_96: # %else370
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_97
+; CHECK-RV64-NEXT:    j .LBB61_619
+; CHECK-RV64-NEXT:  .LBB61_97: # %else374
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_98
+; CHECK-RV64-NEXT:    j .LBB61_620
+; CHECK-RV64-NEXT:  .LBB61_98: # %else378
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_99
+; CHECK-RV64-NEXT:    j .LBB61_621
+; CHECK-RV64-NEXT:  .LBB61_99: # %else382
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_100
+; CHECK-RV64-NEXT:    j .LBB61_622
+; CHECK-RV64-NEXT:  .LBB61_100: # %else386
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_101
+; CHECK-RV64-NEXT:    j .LBB61_623
+; CHECK-RV64-NEXT:  .LBB61_101: # %else390
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_102
+; CHECK-RV64-NEXT:    j .LBB61_624
+; CHECK-RV64-NEXT:  .LBB61_102: # %else394
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_103
+; CHECK-RV64-NEXT:    j .LBB61_625
+; CHECK-RV64-NEXT:  .LBB61_103: # %else398
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_104
+; CHECK-RV64-NEXT:    j .LBB61_626
+; CHECK-RV64-NEXT:  .LBB61_104: # %else402
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_105
+; CHECK-RV64-NEXT:    j .LBB61_627
+; CHECK-RV64-NEXT:  .LBB61_105: # %else406
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_106
+; CHECK-RV64-NEXT:    j .LBB61_628
+; CHECK-RV64-NEXT:  .LBB61_106: # %else410
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_107
+; CHECK-RV64-NEXT:    j .LBB61_629
+; CHECK-RV64-NEXT:  .LBB61_107: # %else414
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_108
+; CHECK-RV64-NEXT:    j .LBB61_630
+; CHECK-RV64-NEXT:  .LBB61_108: # %else418
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_109
+; CHECK-RV64-NEXT:    j .LBB61_631
+; CHECK-RV64-NEXT:  .LBB61_109: # %else422
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_110
+; CHECK-RV64-NEXT:    j .LBB61_632
+; CHECK-RV64-NEXT:  .LBB61_110: # %else426
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_111
+; CHECK-RV64-NEXT:    j .LBB61_633
+; CHECK-RV64-NEXT:  .LBB61_111: # %else430
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_112
+; CHECK-RV64-NEXT:    j .LBB61_634
+; CHECK-RV64-NEXT:  .LBB61_112: # %else434
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_113
+; CHECK-RV64-NEXT:    j .LBB61_635
+; CHECK-RV64-NEXT:  .LBB61_113: # %else438
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_114
+; CHECK-RV64-NEXT:    j .LBB61_636
+; CHECK-RV64-NEXT:  .LBB61_114: # %else442
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_115
+; CHECK-RV64-NEXT:    j .LBB61_637
+; CHECK-RV64-NEXT:  .LBB61_115: # %else446
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_116
+; CHECK-RV64-NEXT:    j .LBB61_638
+; CHECK-RV64-NEXT:  .LBB61_116: # %else450
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_117
+; CHECK-RV64-NEXT:    j .LBB61_639
+; CHECK-RV64-NEXT:  .LBB61_117: # %else454
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_118
+; CHECK-RV64-NEXT:    j .LBB61_640
+; CHECK-RV64-NEXT:  .LBB61_118: # %else458
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_119
+; CHECK-RV64-NEXT:    j .LBB61_641
+; CHECK-RV64-NEXT:  .LBB61_119: # %else462
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_120
+; CHECK-RV64-NEXT:    j .LBB61_642
+; CHECK-RV64-NEXT:  .LBB61_120: # %else466
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_121
+; CHECK-RV64-NEXT:    j .LBB61_643
+; CHECK-RV64-NEXT:  .LBB61_121: # %else470
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_122
+; CHECK-RV64-NEXT:    j .LBB61_644
+; CHECK-RV64-NEXT:  .LBB61_122: # %else474
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_123
+; CHECK-RV64-NEXT:    j .LBB61_645
+; CHECK-RV64-NEXT:  .LBB61_123: # %else478
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_124
+; CHECK-RV64-NEXT:    j .LBB61_646
+; CHECK-RV64-NEXT:  .LBB61_124: # %else482
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_125
+; CHECK-RV64-NEXT:    j .LBB61_647
+; CHECK-RV64-NEXT:  .LBB61_125: # %else486
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_126
+; CHECK-RV64-NEXT:    j .LBB61_648
+; CHECK-RV64-NEXT:  .LBB61_126: # %else490
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_127
+; CHECK-RV64-NEXT:    j .LBB61_649
+; CHECK-RV64-NEXT:  .LBB61_127: # %else494
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_129
+; CHECK-RV64-NEXT:  .LBB61_128: # %cond.load497
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 126
+; CHECK-RV64-NEXT:    li a3, 125
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:  .LBB61_129: # %else498
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_131
+; CHECK-RV64-NEXT:  # %bb.130: # %cond.load501
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v18, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 127
+; CHECK-RV64-NEXT:    li a3, 126
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v18, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_131: # %else502
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_132
+; CHECK-RV64-NEXT:    j .LBB61_650
+; CHECK-RV64-NEXT:  .LBB61_132: # %else506
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_133
+; CHECK-RV64-NEXT:    j .LBB61_651
+; CHECK-RV64-NEXT:  .LBB61_133: # %else510
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_134
+; CHECK-RV64-NEXT:    j .LBB61_652
+; CHECK-RV64-NEXT:  .LBB61_134: # %else514
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_135
+; CHECK-RV64-NEXT:    j .LBB61_653
+; CHECK-RV64-NEXT:  .LBB61_135: # %else518
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_136
+; CHECK-RV64-NEXT:    j .LBB61_654
+; CHECK-RV64-NEXT:  .LBB61_136: # %else522
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_137
+; CHECK-RV64-NEXT:    j .LBB61_655
+; CHECK-RV64-NEXT:  .LBB61_137: # %else526
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_138
+; CHECK-RV64-NEXT:    j .LBB61_656
+; CHECK-RV64-NEXT:  .LBB61_138: # %else530
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_139
+; CHECK-RV64-NEXT:    j .LBB61_657
+; CHECK-RV64-NEXT:  .LBB61_139: # %else534
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_140
+; CHECK-RV64-NEXT:    j .LBB61_658
+; CHECK-RV64-NEXT:  .LBB61_140: # %else538
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_141
+; CHECK-RV64-NEXT:    j .LBB61_659
+; CHECK-RV64-NEXT:  .LBB61_141: # %else542
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_142
+; CHECK-RV64-NEXT:    j .LBB61_660
+; CHECK-RV64-NEXT:  .LBB61_142: # %else546
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_143
+; CHECK-RV64-NEXT:    j .LBB61_661
+; CHECK-RV64-NEXT:  .LBB61_143: # %else550
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_144
+; CHECK-RV64-NEXT:    j .LBB61_662
+; CHECK-RV64-NEXT:  .LBB61_144: # %else554
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_145
+; CHECK-RV64-NEXT:    j .LBB61_663
+; CHECK-RV64-NEXT:  .LBB61_145: # %else558
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_146
+; CHECK-RV64-NEXT:    j .LBB61_664
+; CHECK-RV64-NEXT:  .LBB61_146: # %else562
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_147
+; CHECK-RV64-NEXT:    j .LBB61_665
+; CHECK-RV64-NEXT:  .LBB61_147: # %else566
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_148
+; CHECK-RV64-NEXT:    j .LBB61_666
+; CHECK-RV64-NEXT:  .LBB61_148: # %else570
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_149
+; CHECK-RV64-NEXT:    j .LBB61_667
+; CHECK-RV64-NEXT:  .LBB61_149: # %else574
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_150
+; CHECK-RV64-NEXT:    j .LBB61_668
+; CHECK-RV64-NEXT:  .LBB61_150: # %else578
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_151
+; CHECK-RV64-NEXT:    j .LBB61_669
+; CHECK-RV64-NEXT:  .LBB61_151: # %else582
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_152
+; CHECK-RV64-NEXT:    j .LBB61_670
+; CHECK-RV64-NEXT:  .LBB61_152: # %else586
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_153
+; CHECK-RV64-NEXT:    j .LBB61_671
+; CHECK-RV64-NEXT:  .LBB61_153: # %else590
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_154
+; CHECK-RV64-NEXT:    j .LBB61_672
+; CHECK-RV64-NEXT:  .LBB61_154: # %else594
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_155
+; CHECK-RV64-NEXT:    j .LBB61_673
+; CHECK-RV64-NEXT:  .LBB61_155: # %else598
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_156
+; CHECK-RV64-NEXT:    j .LBB61_674
+; CHECK-RV64-NEXT:  .LBB61_156: # %else602
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_157
+; CHECK-RV64-NEXT:    j .LBB61_675
+; CHECK-RV64-NEXT:  .LBB61_157: # %else606
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_158
+; CHECK-RV64-NEXT:    j .LBB61_676
+; CHECK-RV64-NEXT:  .LBB61_158: # %else610
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_159
+; CHECK-RV64-NEXT:    j .LBB61_677
+; CHECK-RV64-NEXT:  .LBB61_159: # %else614
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_160
+; CHECK-RV64-NEXT:    j .LBB61_678
+; CHECK-RV64-NEXT:  .LBB61_160: # %else618
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_161
+; CHECK-RV64-NEXT:    j .LBB61_679
+; CHECK-RV64-NEXT:  .LBB61_161: # %else622
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_162
+; CHECK-RV64-NEXT:    j .LBB61_680
+; CHECK-RV64-NEXT:  .LBB61_162: # %else626
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_163
+; CHECK-RV64-NEXT:    j .LBB61_681
+; CHECK-RV64-NEXT:  .LBB61_163: # %else630
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_164
+; CHECK-RV64-NEXT:    j .LBB61_682
+; CHECK-RV64-NEXT:  .LBB61_164: # %else634
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_165
+; CHECK-RV64-NEXT:    j .LBB61_683
+; CHECK-RV64-NEXT:  .LBB61_165: # %else638
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_166
+; CHECK-RV64-NEXT:    j .LBB61_684
+; CHECK-RV64-NEXT:  .LBB61_166: # %else642
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_167
+; CHECK-RV64-NEXT:    j .LBB61_685
+; CHECK-RV64-NEXT:  .LBB61_167: # %else646
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_168
+; CHECK-RV64-NEXT:    j .LBB61_686
+; CHECK-RV64-NEXT:  .LBB61_168: # %else650
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_169
+; CHECK-RV64-NEXT:    j .LBB61_687
+; CHECK-RV64-NEXT:  .LBB61_169: # %else654
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_170
+; CHECK-RV64-NEXT:    j .LBB61_688
+; CHECK-RV64-NEXT:  .LBB61_170: # %else658
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_171
+; CHECK-RV64-NEXT:    j .LBB61_689
+; CHECK-RV64-NEXT:  .LBB61_171: # %else662
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_172
+; CHECK-RV64-NEXT:    j .LBB61_690
+; CHECK-RV64-NEXT:  .LBB61_172: # %else666
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_173
+; CHECK-RV64-NEXT:    j .LBB61_691
+; CHECK-RV64-NEXT:  .LBB61_173: # %else670
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_174
+; CHECK-RV64-NEXT:    j .LBB61_692
+; CHECK-RV64-NEXT:  .LBB61_174: # %else674
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_175
+; CHECK-RV64-NEXT:    j .LBB61_693
+; CHECK-RV64-NEXT:  .LBB61_175: # %else678
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_176
+; CHECK-RV64-NEXT:    j .LBB61_694
+; CHECK-RV64-NEXT:  .LBB61_176: # %else682
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_177
+; CHECK-RV64-NEXT:    j .LBB61_695
+; CHECK-RV64-NEXT:  .LBB61_177: # %else686
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_178
+; CHECK-RV64-NEXT:    j .LBB61_696
+; CHECK-RV64-NEXT:  .LBB61_178: # %else690
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_179
+; CHECK-RV64-NEXT:    j .LBB61_697
+; CHECK-RV64-NEXT:  .LBB61_179: # %else694
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_180
+; CHECK-RV64-NEXT:    j .LBB61_698
+; CHECK-RV64-NEXT:  .LBB61_180: # %else698
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_181
+; CHECK-RV64-NEXT:    j .LBB61_699
+; CHECK-RV64-NEXT:  .LBB61_181: # %else702
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_182
+; CHECK-RV64-NEXT:    j .LBB61_700
+; CHECK-RV64-NEXT:  .LBB61_182: # %else706
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_183
+; CHECK-RV64-NEXT:    j .LBB61_701
+; CHECK-RV64-NEXT:  .LBB61_183: # %else710
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_184
+; CHECK-RV64-NEXT:    j .LBB61_702
+; CHECK-RV64-NEXT:  .LBB61_184: # %else714
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_185
+; CHECK-RV64-NEXT:    j .LBB61_703
+; CHECK-RV64-NEXT:  .LBB61_185: # %else718
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_186
+; CHECK-RV64-NEXT:    j .LBB61_704
+; CHECK-RV64-NEXT:  .LBB61_186: # %else722
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_187
+; CHECK-RV64-NEXT:    j .LBB61_705
+; CHECK-RV64-NEXT:  .LBB61_187: # %else726
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_188
+; CHECK-RV64-NEXT:    j .LBB61_706
+; CHECK-RV64-NEXT:  .LBB61_188: # %else730
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_189
+; CHECK-RV64-NEXT:    j .LBB61_707
+; CHECK-RV64-NEXT:  .LBB61_189: # %else734
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_190
+; CHECK-RV64-NEXT:    j .LBB61_708
+; CHECK-RV64-NEXT:  .LBB61_190: # %else738
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_191
+; CHECK-RV64-NEXT:    j .LBB61_709
+; CHECK-RV64-NEXT:  .LBB61_191: # %else742
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_192
+; CHECK-RV64-NEXT:    j .LBB61_710
+; CHECK-RV64-NEXT:  .LBB61_192: # %else746
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_193
+; CHECK-RV64-NEXT:    j .LBB61_711
+; CHECK-RV64-NEXT:  .LBB61_193: # %else750
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_195
+; CHECK-RV64-NEXT:  .LBB61_194: # %cond.load753
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 190
+; CHECK-RV64-NEXT:    li a3, 189
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_195: # %else754
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_197
+; CHECK-RV64-NEXT:  # %bb.196: # %cond.load757
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v20, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 191
+; CHECK-RV64-NEXT:    li a3, 190
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_197: # %else758
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_198
+; CHECK-RV64-NEXT:    j .LBB61_712
+; CHECK-RV64-NEXT:  .LBB61_198: # %else762
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_199
+; CHECK-RV64-NEXT:    j .LBB61_713
+; CHECK-RV64-NEXT:  .LBB61_199: # %else766
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_200
+; CHECK-RV64-NEXT:    j .LBB61_714
+; CHECK-RV64-NEXT:  .LBB61_200: # %else770
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_201
+; CHECK-RV64-NEXT:    j .LBB61_715
+; CHECK-RV64-NEXT:  .LBB61_201: # %else774
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_202
+; CHECK-RV64-NEXT:    j .LBB61_716
+; CHECK-RV64-NEXT:  .LBB61_202: # %else778
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_203
+; CHECK-RV64-NEXT:    j .LBB61_717
+; CHECK-RV64-NEXT:  .LBB61_203: # %else782
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_204
+; CHECK-RV64-NEXT:    j .LBB61_718
+; CHECK-RV64-NEXT:  .LBB61_204: # %else786
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_205
+; CHECK-RV64-NEXT:    j .LBB61_719
+; CHECK-RV64-NEXT:  .LBB61_205: # %else790
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_206
+; CHECK-RV64-NEXT:    j .LBB61_720
+; CHECK-RV64-NEXT:  .LBB61_206: # %else794
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_207
+; CHECK-RV64-NEXT:    j .LBB61_721
+; CHECK-RV64-NEXT:  .LBB61_207: # %else798
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_208
+; CHECK-RV64-NEXT:    j .LBB61_722
+; CHECK-RV64-NEXT:  .LBB61_208: # %else802
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_209
+; CHECK-RV64-NEXT:    j .LBB61_723
+; CHECK-RV64-NEXT:  .LBB61_209: # %else806
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_210
+; CHECK-RV64-NEXT:    j .LBB61_724
+; CHECK-RV64-NEXT:  .LBB61_210: # %else810
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_211
+; CHECK-RV64-NEXT:    j .LBB61_725
+; CHECK-RV64-NEXT:  .LBB61_211: # %else814
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_212
+; CHECK-RV64-NEXT:    j .LBB61_726
+; CHECK-RV64-NEXT:  .LBB61_212: # %else818
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_213
+; CHECK-RV64-NEXT:    j .LBB61_727
+; CHECK-RV64-NEXT:  .LBB61_213: # %else822
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_214
+; CHECK-RV64-NEXT:    j .LBB61_728
+; CHECK-RV64-NEXT:  .LBB61_214: # %else826
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_215
+; CHECK-RV64-NEXT:    j .LBB61_729
+; CHECK-RV64-NEXT:  .LBB61_215: # %else830
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_216
+; CHECK-RV64-NEXT:    j .LBB61_730
+; CHECK-RV64-NEXT:  .LBB61_216: # %else834
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_217
+; CHECK-RV64-NEXT:    j .LBB61_731
+; CHECK-RV64-NEXT:  .LBB61_217: # %else838
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_218
+; CHECK-RV64-NEXT:    j .LBB61_732
+; CHECK-RV64-NEXT:  .LBB61_218: # %else842
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_219
+; CHECK-RV64-NEXT:    j .LBB61_733
+; CHECK-RV64-NEXT:  .LBB61_219: # %else846
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_220
+; CHECK-RV64-NEXT:    j .LBB61_734
+; CHECK-RV64-NEXT:  .LBB61_220: # %else850
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_221
+; CHECK-RV64-NEXT:    j .LBB61_735
+; CHECK-RV64-NEXT:  .LBB61_221: # %else854
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_222
+; CHECK-RV64-NEXT:    j .LBB61_736
+; CHECK-RV64-NEXT:  .LBB61_222: # %else858
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_223
+; CHECK-RV64-NEXT:    j .LBB61_737
+; CHECK-RV64-NEXT:  .LBB61_223: # %else862
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_224
+; CHECK-RV64-NEXT:    j .LBB61_738
+; CHECK-RV64-NEXT:  .LBB61_224: # %else866
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_225
+; CHECK-RV64-NEXT:    j .LBB61_739
+; CHECK-RV64-NEXT:  .LBB61_225: # %else870
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_226
+; CHECK-RV64-NEXT:    j .LBB61_740
+; CHECK-RV64-NEXT:  .LBB61_226: # %else874
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_227
+; CHECK-RV64-NEXT:    j .LBB61_741
+; CHECK-RV64-NEXT:  .LBB61_227: # %else878
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_228
+; CHECK-RV64-NEXT:    j .LBB61_742
+; CHECK-RV64-NEXT:  .LBB61_228: # %else882
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_229
+; CHECK-RV64-NEXT:    j .LBB61_743
+; CHECK-RV64-NEXT:  .LBB61_229: # %else886
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_230
+; CHECK-RV64-NEXT:    j .LBB61_744
+; CHECK-RV64-NEXT:  .LBB61_230: # %else890
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_231
+; CHECK-RV64-NEXT:    j .LBB61_745
+; CHECK-RV64-NEXT:  .LBB61_231: # %else894
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_232
+; CHECK-RV64-NEXT:    j .LBB61_746
+; CHECK-RV64-NEXT:  .LBB61_232: # %else898
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_233
+; CHECK-RV64-NEXT:    j .LBB61_747
+; CHECK-RV64-NEXT:  .LBB61_233: # %else902
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_234
+; CHECK-RV64-NEXT:    j .LBB61_748
+; CHECK-RV64-NEXT:  .LBB61_234: # %else906
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_235
+; CHECK-RV64-NEXT:    j .LBB61_749
+; CHECK-RV64-NEXT:  .LBB61_235: # %else910
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_236
+; CHECK-RV64-NEXT:    j .LBB61_750
+; CHECK-RV64-NEXT:  .LBB61_236: # %else914
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_237
+; CHECK-RV64-NEXT:    j .LBB61_751
+; CHECK-RV64-NEXT:  .LBB61_237: # %else918
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_238
+; CHECK-RV64-NEXT:    j .LBB61_752
+; CHECK-RV64-NEXT:  .LBB61_238: # %else922
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_239
+; CHECK-RV64-NEXT:    j .LBB61_753
+; CHECK-RV64-NEXT:  .LBB61_239: # %else926
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_240
+; CHECK-RV64-NEXT:    j .LBB61_754
+; CHECK-RV64-NEXT:  .LBB61_240: # %else930
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_241
+; CHECK-RV64-NEXT:    j .LBB61_755
+; CHECK-RV64-NEXT:  .LBB61_241: # %else934
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_242
+; CHECK-RV64-NEXT:    j .LBB61_756
+; CHECK-RV64-NEXT:  .LBB61_242: # %else938
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_243
+; CHECK-RV64-NEXT:    j .LBB61_757
+; CHECK-RV64-NEXT:  .LBB61_243: # %else942
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_244
+; CHECK-RV64-NEXT:    j .LBB61_758
+; CHECK-RV64-NEXT:  .LBB61_244: # %else946
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_245
+; CHECK-RV64-NEXT:    j .LBB61_759
+; CHECK-RV64-NEXT:  .LBB61_245: # %else950
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_246
+; CHECK-RV64-NEXT:    j .LBB61_760
+; CHECK-RV64-NEXT:  .LBB61_246: # %else954
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_247
+; CHECK-RV64-NEXT:    j .LBB61_761
+; CHECK-RV64-NEXT:  .LBB61_247: # %else958
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_248
+; CHECK-RV64-NEXT:    j .LBB61_762
+; CHECK-RV64-NEXT:  .LBB61_248: # %else962
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_249
+; CHECK-RV64-NEXT:    j .LBB61_763
+; CHECK-RV64-NEXT:  .LBB61_249: # %else966
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_250
+; CHECK-RV64-NEXT:    j .LBB61_764
+; CHECK-RV64-NEXT:  .LBB61_250: # %else970
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_251
+; CHECK-RV64-NEXT:    j .LBB61_765
+; CHECK-RV64-NEXT:  .LBB61_251: # %else974
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_252
+; CHECK-RV64-NEXT:    j .LBB61_766
+; CHECK-RV64-NEXT:  .LBB61_252: # %else978
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_253
+; CHECK-RV64-NEXT:    j .LBB61_767
+; CHECK-RV64-NEXT:  .LBB61_253: # %else982
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_254
+; CHECK-RV64-NEXT:    j .LBB61_768
+; CHECK-RV64-NEXT:  .LBB61_254: # %else986
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_255
+; CHECK-RV64-NEXT:    j .LBB61_769
+; CHECK-RV64-NEXT:  .LBB61_255: # %else990
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_256
+; CHECK-RV64-NEXT:    j .LBB61_770
+; CHECK-RV64-NEXT:  .LBB61_256: # %else994
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_257
+; CHECK-RV64-NEXT:    j .LBB61_771
+; CHECK-RV64-NEXT:  .LBB61_257: # %else998
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_258
+; CHECK-RV64-NEXT:    j .LBB61_772
+; CHECK-RV64-NEXT:  .LBB61_258: # %else1002
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_259
+; CHECK-RV64-NEXT:    j .LBB61_773
+; CHECK-RV64-NEXT:  .LBB61_259: # %else1006
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_261
+; CHECK-RV64-NEXT:  .LBB61_260: # %cond.load1009
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 254
+; CHECK-RV64-NEXT:    li a3, 253
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_261: # %else1010
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_263
+; CHECK-RV64-NEXT:  # %bb.262: # %cond.load1013
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v20, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 255
+; CHECK-RV64-NEXT:    li a3, 254
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v20, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:  .LBB61_263: # %else1014
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_264
+; CHECK-RV64-NEXT:    j .LBB61_774
+; CHECK-RV64-NEXT:  .LBB61_264: # %else1018
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_265
+; CHECK-RV64-NEXT:    j .LBB61_775
+; CHECK-RV64-NEXT:  .LBB61_265: # %else1022
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_266
+; CHECK-RV64-NEXT:    j .LBB61_776
+; CHECK-RV64-NEXT:  .LBB61_266: # %else1026
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_267
+; CHECK-RV64-NEXT:    j .LBB61_777
+; CHECK-RV64-NEXT:  .LBB61_267: # %else1030
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_268
+; CHECK-RV64-NEXT:    j .LBB61_778
+; CHECK-RV64-NEXT:  .LBB61_268: # %else1034
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_269
+; CHECK-RV64-NEXT:    j .LBB61_779
+; CHECK-RV64-NEXT:  .LBB61_269: # %else1038
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_270
+; CHECK-RV64-NEXT:    j .LBB61_780
+; CHECK-RV64-NEXT:  .LBB61_270: # %else1042
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_271
+; CHECK-RV64-NEXT:    j .LBB61_781
+; CHECK-RV64-NEXT:  .LBB61_271: # %else1046
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_272
+; CHECK-RV64-NEXT:    j .LBB61_782
+; CHECK-RV64-NEXT:  .LBB61_272: # %else1050
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_273
+; CHECK-RV64-NEXT:    j .LBB61_783
+; CHECK-RV64-NEXT:  .LBB61_273: # %else1054
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_274
+; CHECK-RV64-NEXT:    j .LBB61_784
+; CHECK-RV64-NEXT:  .LBB61_274: # %else1058
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_275
+; CHECK-RV64-NEXT:    j .LBB61_785
+; CHECK-RV64-NEXT:  .LBB61_275: # %else1062
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_276
+; CHECK-RV64-NEXT:    j .LBB61_786
+; CHECK-RV64-NEXT:  .LBB61_276: # %else1066
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_277
+; CHECK-RV64-NEXT:    j .LBB61_787
+; CHECK-RV64-NEXT:  .LBB61_277: # %else1070
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_278
+; CHECK-RV64-NEXT:    j .LBB61_788
+; CHECK-RV64-NEXT:  .LBB61_278: # %else1074
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_279
+; CHECK-RV64-NEXT:    j .LBB61_789
+; CHECK-RV64-NEXT:  .LBB61_279: # %else1078
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_280
+; CHECK-RV64-NEXT:    j .LBB61_790
+; CHECK-RV64-NEXT:  .LBB61_280: # %else1082
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_281
+; CHECK-RV64-NEXT:    j .LBB61_791
+; CHECK-RV64-NEXT:  .LBB61_281: # %else1086
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_282
+; CHECK-RV64-NEXT:    j .LBB61_792
+; CHECK-RV64-NEXT:  .LBB61_282: # %else1090
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_283
+; CHECK-RV64-NEXT:    j .LBB61_793
+; CHECK-RV64-NEXT:  .LBB61_283: # %else1094
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_284
+; CHECK-RV64-NEXT:    j .LBB61_794
+; CHECK-RV64-NEXT:  .LBB61_284: # %else1098
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_285
+; CHECK-RV64-NEXT:    j .LBB61_795
+; CHECK-RV64-NEXT:  .LBB61_285: # %else1102
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_286
+; CHECK-RV64-NEXT:    j .LBB61_796
+; CHECK-RV64-NEXT:  .LBB61_286: # %else1106
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_287
+; CHECK-RV64-NEXT:    j .LBB61_797
+; CHECK-RV64-NEXT:  .LBB61_287: # %else1110
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_288
+; CHECK-RV64-NEXT:    j .LBB61_798
+; CHECK-RV64-NEXT:  .LBB61_288: # %else1114
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_289
+; CHECK-RV64-NEXT:    j .LBB61_799
+; CHECK-RV64-NEXT:  .LBB61_289: # %else1118
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_290
+; CHECK-RV64-NEXT:    j .LBB61_800
+; CHECK-RV64-NEXT:  .LBB61_290: # %else1122
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_291
+; CHECK-RV64-NEXT:    j .LBB61_801
+; CHECK-RV64-NEXT:  .LBB61_291: # %else1126
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_292
+; CHECK-RV64-NEXT:    j .LBB61_802
+; CHECK-RV64-NEXT:  .LBB61_292: # %else1130
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_293
+; CHECK-RV64-NEXT:    j .LBB61_803
+; CHECK-RV64-NEXT:  .LBB61_293: # %else1134
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_294
+; CHECK-RV64-NEXT:    j .LBB61_804
+; CHECK-RV64-NEXT:  .LBB61_294: # %else1138
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_295
+; CHECK-RV64-NEXT:    j .LBB61_805
+; CHECK-RV64-NEXT:  .LBB61_295: # %else1142
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_296
+; CHECK-RV64-NEXT:    j .LBB61_806
+; CHECK-RV64-NEXT:  .LBB61_296: # %else1146
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_297
+; CHECK-RV64-NEXT:    j .LBB61_807
+; CHECK-RV64-NEXT:  .LBB61_297: # %else1150
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_298
+; CHECK-RV64-NEXT:    j .LBB61_808
+; CHECK-RV64-NEXT:  .LBB61_298: # %else1154
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_299
+; CHECK-RV64-NEXT:    j .LBB61_809
+; CHECK-RV64-NEXT:  .LBB61_299: # %else1158
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_300
+; CHECK-RV64-NEXT:    j .LBB61_810
+; CHECK-RV64-NEXT:  .LBB61_300: # %else1162
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_301
+; CHECK-RV64-NEXT:    j .LBB61_811
+; CHECK-RV64-NEXT:  .LBB61_301: # %else1166
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_302
+; CHECK-RV64-NEXT:    j .LBB61_812
+; CHECK-RV64-NEXT:  .LBB61_302: # %else1170
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_303
+; CHECK-RV64-NEXT:    j .LBB61_813
+; CHECK-RV64-NEXT:  .LBB61_303: # %else1174
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_304
+; CHECK-RV64-NEXT:    j .LBB61_814
+; CHECK-RV64-NEXT:  .LBB61_304: # %else1178
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_305
+; CHECK-RV64-NEXT:    j .LBB61_815
+; CHECK-RV64-NEXT:  .LBB61_305: # %else1182
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_306
+; CHECK-RV64-NEXT:    j .LBB61_816
+; CHECK-RV64-NEXT:  .LBB61_306: # %else1186
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_307
+; CHECK-RV64-NEXT:    j .LBB61_817
+; CHECK-RV64-NEXT:  .LBB61_307: # %else1190
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_308
+; CHECK-RV64-NEXT:    j .LBB61_818
+; CHECK-RV64-NEXT:  .LBB61_308: # %else1194
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_309
+; CHECK-RV64-NEXT:    j .LBB61_819
+; CHECK-RV64-NEXT:  .LBB61_309: # %else1198
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_310
+; CHECK-RV64-NEXT:    j .LBB61_820
+; CHECK-RV64-NEXT:  .LBB61_310: # %else1202
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_311
+; CHECK-RV64-NEXT:    j .LBB61_821
+; CHECK-RV64-NEXT:  .LBB61_311: # %else1206
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_312
+; CHECK-RV64-NEXT:    j .LBB61_822
+; CHECK-RV64-NEXT:  .LBB61_312: # %else1210
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_313
+; CHECK-RV64-NEXT:    j .LBB61_823
+; CHECK-RV64-NEXT:  .LBB61_313: # %else1214
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_314
+; CHECK-RV64-NEXT:    j .LBB61_824
+; CHECK-RV64-NEXT:  .LBB61_314: # %else1218
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_315
+; CHECK-RV64-NEXT:    j .LBB61_825
+; CHECK-RV64-NEXT:  .LBB61_315: # %else1222
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_316
+; CHECK-RV64-NEXT:    j .LBB61_826
+; CHECK-RV64-NEXT:  .LBB61_316: # %else1226
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_317
+; CHECK-RV64-NEXT:    j .LBB61_827
+; CHECK-RV64-NEXT:  .LBB61_317: # %else1230
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_318
+; CHECK-RV64-NEXT:    j .LBB61_828
+; CHECK-RV64-NEXT:  .LBB61_318: # %else1234
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_319
+; CHECK-RV64-NEXT:    j .LBB61_829
+; CHECK-RV64-NEXT:  .LBB61_319: # %else1238
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_320
+; CHECK-RV64-NEXT:    j .LBB61_830
+; CHECK-RV64-NEXT:  .LBB61_320: # %else1242
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_321
+; CHECK-RV64-NEXT:    j .LBB61_831
+; CHECK-RV64-NEXT:  .LBB61_321: # %else1246
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_322
+; CHECK-RV64-NEXT:    j .LBB61_832
+; CHECK-RV64-NEXT:  .LBB61_322: # %else1250
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_323
+; CHECK-RV64-NEXT:    j .LBB61_833
+; CHECK-RV64-NEXT:  .LBB61_323: # %else1254
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_324
+; CHECK-RV64-NEXT:    j .LBB61_834
+; CHECK-RV64-NEXT:  .LBB61_324: # %else1258
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_325
+; CHECK-RV64-NEXT:    j .LBB61_835
+; CHECK-RV64-NEXT:  .LBB61_325: # %else1262
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_327
+; CHECK-RV64-NEXT:  .LBB61_326: # %cond.load1265
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 318
+; CHECK-RV64-NEXT:    li a3, 317
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_327: # %else1266
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_329
+; CHECK-RV64-NEXT:  # %bb.328: # %cond.load1269
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    li a1, 319
+; CHECK-RV64-NEXT:    li a3, 318
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_329: # %else1270
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_330
+; CHECK-RV64-NEXT:    j .LBB61_836
+; CHECK-RV64-NEXT:  .LBB61_330: # %else1274
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_331
+; CHECK-RV64-NEXT:    j .LBB61_837
+; CHECK-RV64-NEXT:  .LBB61_331: # %else1278
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_332
+; CHECK-RV64-NEXT:    j .LBB61_838
+; CHECK-RV64-NEXT:  .LBB61_332: # %else1282
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_333
+; CHECK-RV64-NEXT:    j .LBB61_839
+; CHECK-RV64-NEXT:  .LBB61_333: # %else1286
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_334
+; CHECK-RV64-NEXT:    j .LBB61_840
+; CHECK-RV64-NEXT:  .LBB61_334: # %else1290
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_335
+; CHECK-RV64-NEXT:    j .LBB61_841
+; CHECK-RV64-NEXT:  .LBB61_335: # %else1294
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_336
+; CHECK-RV64-NEXT:    j .LBB61_842
+; CHECK-RV64-NEXT:  .LBB61_336: # %else1298
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_337
+; CHECK-RV64-NEXT:    j .LBB61_843
+; CHECK-RV64-NEXT:  .LBB61_337: # %else1302
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_338
+; CHECK-RV64-NEXT:    j .LBB61_844
+; CHECK-RV64-NEXT:  .LBB61_338: # %else1306
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_339
+; CHECK-RV64-NEXT:    j .LBB61_845
+; CHECK-RV64-NEXT:  .LBB61_339: # %else1310
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_340
+; CHECK-RV64-NEXT:    j .LBB61_846
+; CHECK-RV64-NEXT:  .LBB61_340: # %else1314
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_341
+; CHECK-RV64-NEXT:    j .LBB61_847
+; CHECK-RV64-NEXT:  .LBB61_341: # %else1318
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_342
+; CHECK-RV64-NEXT:    j .LBB61_848
+; CHECK-RV64-NEXT:  .LBB61_342: # %else1322
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_343
+; CHECK-RV64-NEXT:    j .LBB61_849
+; CHECK-RV64-NEXT:  .LBB61_343: # %else1326
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_344
+; CHECK-RV64-NEXT:    j .LBB61_850
+; CHECK-RV64-NEXT:  .LBB61_344: # %else1330
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_345
+; CHECK-RV64-NEXT:    j .LBB61_851
+; CHECK-RV64-NEXT:  .LBB61_345: # %else1334
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_346
+; CHECK-RV64-NEXT:    j .LBB61_852
+; CHECK-RV64-NEXT:  .LBB61_346: # %else1338
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_347
+; CHECK-RV64-NEXT:    j .LBB61_853
+; CHECK-RV64-NEXT:  .LBB61_347: # %else1342
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_348
+; CHECK-RV64-NEXT:    j .LBB61_854
+; CHECK-RV64-NEXT:  .LBB61_348: # %else1346
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_349
+; CHECK-RV64-NEXT:    j .LBB61_855
+; CHECK-RV64-NEXT:  .LBB61_349: # %else1350
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_350
+; CHECK-RV64-NEXT:    j .LBB61_856
+; CHECK-RV64-NEXT:  .LBB61_350: # %else1354
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_351
+; CHECK-RV64-NEXT:    j .LBB61_857
+; CHECK-RV64-NEXT:  .LBB61_351: # %else1358
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_352
+; CHECK-RV64-NEXT:    j .LBB61_858
+; CHECK-RV64-NEXT:  .LBB61_352: # %else1362
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_353
+; CHECK-RV64-NEXT:    j .LBB61_859
+; CHECK-RV64-NEXT:  .LBB61_353: # %else1366
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_354
+; CHECK-RV64-NEXT:    j .LBB61_860
+; CHECK-RV64-NEXT:  .LBB61_354: # %else1370
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_355
+; CHECK-RV64-NEXT:    j .LBB61_861
+; CHECK-RV64-NEXT:  .LBB61_355: # %else1374
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_356
+; CHECK-RV64-NEXT:    j .LBB61_862
+; CHECK-RV64-NEXT:  .LBB61_356: # %else1378
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_357
+; CHECK-RV64-NEXT:    j .LBB61_863
+; CHECK-RV64-NEXT:  .LBB61_357: # %else1382
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_358
+; CHECK-RV64-NEXT:    j .LBB61_864
+; CHECK-RV64-NEXT:  .LBB61_358: # %else1386
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_359
+; CHECK-RV64-NEXT:    j .LBB61_865
+; CHECK-RV64-NEXT:  .LBB61_359: # %else1390
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_360
+; CHECK-RV64-NEXT:    j .LBB61_866
+; CHECK-RV64-NEXT:  .LBB61_360: # %else1394
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_361
+; CHECK-RV64-NEXT:    j .LBB61_867
+; CHECK-RV64-NEXT:  .LBB61_361: # %else1398
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_362
+; CHECK-RV64-NEXT:    j .LBB61_868
+; CHECK-RV64-NEXT:  .LBB61_362: # %else1402
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_363
+; CHECK-RV64-NEXT:    j .LBB61_869
+; CHECK-RV64-NEXT:  .LBB61_363: # %else1406
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_364
+; CHECK-RV64-NEXT:    j .LBB61_870
+; CHECK-RV64-NEXT:  .LBB61_364: # %else1410
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_365
+; CHECK-RV64-NEXT:    j .LBB61_871
+; CHECK-RV64-NEXT:  .LBB61_365: # %else1414
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_366
+; CHECK-RV64-NEXT:    j .LBB61_872
+; CHECK-RV64-NEXT:  .LBB61_366: # %else1418
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_367
+; CHECK-RV64-NEXT:    j .LBB61_873
+; CHECK-RV64-NEXT:  .LBB61_367: # %else1422
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_368
+; CHECK-RV64-NEXT:    j .LBB61_874
+; CHECK-RV64-NEXT:  .LBB61_368: # %else1426
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_369
+; CHECK-RV64-NEXT:    j .LBB61_875
+; CHECK-RV64-NEXT:  .LBB61_369: # %else1430
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_370
+; CHECK-RV64-NEXT:    j .LBB61_876
+; CHECK-RV64-NEXT:  .LBB61_370: # %else1434
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_371
+; CHECK-RV64-NEXT:    j .LBB61_877
+; CHECK-RV64-NEXT:  .LBB61_371: # %else1438
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_372
+; CHECK-RV64-NEXT:    j .LBB61_878
+; CHECK-RV64-NEXT:  .LBB61_372: # %else1442
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_373
+; CHECK-RV64-NEXT:    j .LBB61_879
+; CHECK-RV64-NEXT:  .LBB61_373: # %else1446
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_374
+; CHECK-RV64-NEXT:    j .LBB61_880
+; CHECK-RV64-NEXT:  .LBB61_374: # %else1450
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_375
+; CHECK-RV64-NEXT:    j .LBB61_881
+; CHECK-RV64-NEXT:  .LBB61_375: # %else1454
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_376
+; CHECK-RV64-NEXT:    j .LBB61_882
+; CHECK-RV64-NEXT:  .LBB61_376: # %else1458
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_377
+; CHECK-RV64-NEXT:    j .LBB61_883
+; CHECK-RV64-NEXT:  .LBB61_377: # %else1462
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_378
+; CHECK-RV64-NEXT:    j .LBB61_884
+; CHECK-RV64-NEXT:  .LBB61_378: # %else1466
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_379
+; CHECK-RV64-NEXT:    j .LBB61_885
+; CHECK-RV64-NEXT:  .LBB61_379: # %else1470
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_380
+; CHECK-RV64-NEXT:    j .LBB61_886
+; CHECK-RV64-NEXT:  .LBB61_380: # %else1474
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_381
+; CHECK-RV64-NEXT:    j .LBB61_887
+; CHECK-RV64-NEXT:  .LBB61_381: # %else1478
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_382
+; CHECK-RV64-NEXT:    j .LBB61_888
+; CHECK-RV64-NEXT:  .LBB61_382: # %else1482
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_383
+; CHECK-RV64-NEXT:    j .LBB61_889
+; CHECK-RV64-NEXT:  .LBB61_383: # %else1486
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_384
+; CHECK-RV64-NEXT:    j .LBB61_890
+; CHECK-RV64-NEXT:  .LBB61_384: # %else1490
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_385
+; CHECK-RV64-NEXT:    j .LBB61_891
+; CHECK-RV64-NEXT:  .LBB61_385: # %else1494
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_386
+; CHECK-RV64-NEXT:    j .LBB61_892
+; CHECK-RV64-NEXT:  .LBB61_386: # %else1498
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_387
+; CHECK-RV64-NEXT:    j .LBB61_893
+; CHECK-RV64-NEXT:  .LBB61_387: # %else1502
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_388
+; CHECK-RV64-NEXT:    j .LBB61_894
+; CHECK-RV64-NEXT:  .LBB61_388: # %else1506
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_389
+; CHECK-RV64-NEXT:    j .LBB61_895
+; CHECK-RV64-NEXT:  .LBB61_389: # %else1510
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_390
+; CHECK-RV64-NEXT:    j .LBB61_896
+; CHECK-RV64-NEXT:  .LBB61_390: # %else1514
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_391
+; CHECK-RV64-NEXT:    j .LBB61_897
+; CHECK-RV64-NEXT:  .LBB61_391: # %else1518
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_393
+; CHECK-RV64-NEXT:  .LBB61_392: # %cond.load1521
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 382
+; CHECK-RV64-NEXT:    li a3, 381
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_393: # %else1522
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_395
+; CHECK-RV64-NEXT:  # %bb.394: # %cond.load1525
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    li a2, 383
+; CHECK-RV64-NEXT:    li a3, 382
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_395: # %else1526
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a2, v16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_396
+; CHECK-RV64-NEXT:    j .LBB61_898
+; CHECK-RV64-NEXT:  .LBB61_396: # %else1530
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_397
+; CHECK-RV64-NEXT:    j .LBB61_899
+; CHECK-RV64-NEXT:  .LBB61_397: # %else1534
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_398
+; CHECK-RV64-NEXT:    j .LBB61_900
+; CHECK-RV64-NEXT:  .LBB61_398: # %else1538
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_399
+; CHECK-RV64-NEXT:    j .LBB61_901
+; CHECK-RV64-NEXT:  .LBB61_399: # %else1542
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_400
+; CHECK-RV64-NEXT:    j .LBB61_902
+; CHECK-RV64-NEXT:  .LBB61_400: # %else1546
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_401
+; CHECK-RV64-NEXT:    j .LBB61_903
+; CHECK-RV64-NEXT:  .LBB61_401: # %else1550
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_402
+; CHECK-RV64-NEXT:    j .LBB61_904
+; CHECK-RV64-NEXT:  .LBB61_402: # %else1554
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_403
+; CHECK-RV64-NEXT:    j .LBB61_905
+; CHECK-RV64-NEXT:  .LBB61_403: # %else1558
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_404
+; CHECK-RV64-NEXT:    j .LBB61_906
+; CHECK-RV64-NEXT:  .LBB61_404: # %else1562
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_405
+; CHECK-RV64-NEXT:    j .LBB61_907
+; CHECK-RV64-NEXT:  .LBB61_405: # %else1566
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_406
+; CHECK-RV64-NEXT:    j .LBB61_908
+; CHECK-RV64-NEXT:  .LBB61_406: # %else1570
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    beqz a1, .LBB61_407
+; CHECK-RV64-NEXT:    j .LBB61_909
+; CHECK-RV64-NEXT:  .LBB61_407: # %else1574
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_408
+; CHECK-RV64-NEXT:    j .LBB61_910
+; CHECK-RV64-NEXT:  .LBB61_408: # %else1578
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_409
+; CHECK-RV64-NEXT:    j .LBB61_911
+; CHECK-RV64-NEXT:  .LBB61_409: # %else1582
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_410
+; CHECK-RV64-NEXT:    j .LBB61_912
+; CHECK-RV64-NEXT:  .LBB61_410: # %else1586
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_411
+; CHECK-RV64-NEXT:    j .LBB61_913
+; CHECK-RV64-NEXT:  .LBB61_411: # %else1590
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_412
+; CHECK-RV64-NEXT:    j .LBB61_914
+; CHECK-RV64-NEXT:  .LBB61_412: # %else1594
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_413
+; CHECK-RV64-NEXT:    j .LBB61_915
+; CHECK-RV64-NEXT:  .LBB61_413: # %else1598
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_414
+; CHECK-RV64-NEXT:    j .LBB61_916
+; CHECK-RV64-NEXT:  .LBB61_414: # %else1602
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_415
+; CHECK-RV64-NEXT:    j .LBB61_917
+; CHECK-RV64-NEXT:  .LBB61_415: # %else1606
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_416
+; CHECK-RV64-NEXT:    j .LBB61_918
+; CHECK-RV64-NEXT:  .LBB61_416: # %else1610
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_417
+; CHECK-RV64-NEXT:    j .LBB61_919
+; CHECK-RV64-NEXT:  .LBB61_417: # %else1614
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_418
+; CHECK-RV64-NEXT:    j .LBB61_920
+; CHECK-RV64-NEXT:  .LBB61_418: # %else1618
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_419
+; CHECK-RV64-NEXT:    j .LBB61_921
+; CHECK-RV64-NEXT:  .LBB61_419: # %else1622
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_420
+; CHECK-RV64-NEXT:    j .LBB61_922
+; CHECK-RV64-NEXT:  .LBB61_420: # %else1626
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_421
+; CHECK-RV64-NEXT:    j .LBB61_923
+; CHECK-RV64-NEXT:  .LBB61_421: # %else1630
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_422
+; CHECK-RV64-NEXT:    j .LBB61_924
+; CHECK-RV64-NEXT:  .LBB61_422: # %else1634
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_423
+; CHECK-RV64-NEXT:    j .LBB61_925
+; CHECK-RV64-NEXT:  .LBB61_423: # %else1638
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_424
+; CHECK-RV64-NEXT:    j .LBB61_926
+; CHECK-RV64-NEXT:  .LBB61_424: # %else1642
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_425
+; CHECK-RV64-NEXT:    j .LBB61_927
+; CHECK-RV64-NEXT:  .LBB61_425: # %else1646
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_426
+; CHECK-RV64-NEXT:    j .LBB61_928
+; CHECK-RV64-NEXT:  .LBB61_426: # %else1650
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_427
+; CHECK-RV64-NEXT:    j .LBB61_929
+; CHECK-RV64-NEXT:  .LBB61_427: # %else1654
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_428
+; CHECK-RV64-NEXT:    j .LBB61_930
+; CHECK-RV64-NEXT:  .LBB61_428: # %else1658
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_429
+; CHECK-RV64-NEXT:    j .LBB61_931
+; CHECK-RV64-NEXT:  .LBB61_429: # %else1662
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_430
+; CHECK-RV64-NEXT:    j .LBB61_932
+; CHECK-RV64-NEXT:  .LBB61_430: # %else1666
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_431
+; CHECK-RV64-NEXT:    j .LBB61_933
+; CHECK-RV64-NEXT:  .LBB61_431: # %else1670
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_432
+; CHECK-RV64-NEXT:    j .LBB61_934
+; CHECK-RV64-NEXT:  .LBB61_432: # %else1674
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_433
+; CHECK-RV64-NEXT:    j .LBB61_935
+; CHECK-RV64-NEXT:  .LBB61_433: # %else1678
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_434
+; CHECK-RV64-NEXT:    j .LBB61_936
+; CHECK-RV64-NEXT:  .LBB61_434: # %else1682
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_435
+; CHECK-RV64-NEXT:    j .LBB61_937
+; CHECK-RV64-NEXT:  .LBB61_435: # %else1686
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_436
+; CHECK-RV64-NEXT:    j .LBB61_938
+; CHECK-RV64-NEXT:  .LBB61_436: # %else1690
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_437
+; CHECK-RV64-NEXT:    j .LBB61_939
+; CHECK-RV64-NEXT:  .LBB61_437: # %else1694
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_438
+; CHECK-RV64-NEXT:    j .LBB61_940
+; CHECK-RV64-NEXT:  .LBB61_438: # %else1698
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_439
+; CHECK-RV64-NEXT:    j .LBB61_941
+; CHECK-RV64-NEXT:  .LBB61_439: # %else1702
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_440
+; CHECK-RV64-NEXT:    j .LBB61_942
+; CHECK-RV64-NEXT:  .LBB61_440: # %else1706
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_441
+; CHECK-RV64-NEXT:    j .LBB61_943
+; CHECK-RV64-NEXT:  .LBB61_441: # %else1710
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_442
+; CHECK-RV64-NEXT:    j .LBB61_944
+; CHECK-RV64-NEXT:  .LBB61_442: # %else1714
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_443
+; CHECK-RV64-NEXT:    j .LBB61_945
+; CHECK-RV64-NEXT:  .LBB61_443: # %else1718
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_444
+; CHECK-RV64-NEXT:    j .LBB61_946
+; CHECK-RV64-NEXT:  .LBB61_444: # %else1722
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_445
+; CHECK-RV64-NEXT:    j .LBB61_947
+; CHECK-RV64-NEXT:  .LBB61_445: # %else1726
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_446
+; CHECK-RV64-NEXT:    j .LBB61_948
+; CHECK-RV64-NEXT:  .LBB61_446: # %else1730
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_447
+; CHECK-RV64-NEXT:    j .LBB61_949
+; CHECK-RV64-NEXT:  .LBB61_447: # %else1734
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_448
+; CHECK-RV64-NEXT:    j .LBB61_950
+; CHECK-RV64-NEXT:  .LBB61_448: # %else1738
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_449
+; CHECK-RV64-NEXT:    j .LBB61_951
+; CHECK-RV64-NEXT:  .LBB61_449: # %else1742
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_450
+; CHECK-RV64-NEXT:    j .LBB61_952
+; CHECK-RV64-NEXT:  .LBB61_450: # %else1746
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_451
+; CHECK-RV64-NEXT:    j .LBB61_953
+; CHECK-RV64-NEXT:  .LBB61_451: # %else1750
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_452
+; CHECK-RV64-NEXT:    j .LBB61_954
+; CHECK-RV64-NEXT:  .LBB61_452: # %else1754
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_453
+; CHECK-RV64-NEXT:    j .LBB61_955
+; CHECK-RV64-NEXT:  .LBB61_453: # %else1758
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_454
+; CHECK-RV64-NEXT:    j .LBB61_956
+; CHECK-RV64-NEXT:  .LBB61_454: # %else1762
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_455
+; CHECK-RV64-NEXT:    j .LBB61_957
+; CHECK-RV64-NEXT:  .LBB61_455: # %else1766
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_456
+; CHECK-RV64-NEXT:    j .LBB61_958
+; CHECK-RV64-NEXT:  .LBB61_456: # %else1770
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_457
+; CHECK-RV64-NEXT:    j .LBB61_959
+; CHECK-RV64-NEXT:  .LBB61_457: # %else1774
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_459
+; CHECK-RV64-NEXT:  .LBB61_458: # %cond.load1777
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 446
+; CHECK-RV64-NEXT:    li a3, 445
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_459: # %else1778
+; CHECK-RV64-NEXT:    slli a1, a2, 1
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vslidedown.vi v16, v0, 7
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_461
+; CHECK-RV64-NEXT:  # %bb.460: # %cond.load1781
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    li a1, 447
+; CHECK-RV64-NEXT:    li a3, 446
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:  .LBB61_461: # %else1782
+; CHECK-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.x.s a1, v16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_462
+; CHECK-RV64-NEXT:    j .LBB61_960
+; CHECK-RV64-NEXT:  .LBB61_462: # %else1786
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_463
+; CHECK-RV64-NEXT:    j .LBB61_961
+; CHECK-RV64-NEXT:  .LBB61_463: # %else1790
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_464
+; CHECK-RV64-NEXT:    j .LBB61_962
+; CHECK-RV64-NEXT:  .LBB61_464: # %else1794
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_465
+; CHECK-RV64-NEXT:    j .LBB61_963
+; CHECK-RV64-NEXT:  .LBB61_465: # %else1798
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_466
+; CHECK-RV64-NEXT:    j .LBB61_964
+; CHECK-RV64-NEXT:  .LBB61_466: # %else1802
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_467
+; CHECK-RV64-NEXT:    j .LBB61_965
+; CHECK-RV64-NEXT:  .LBB61_467: # %else1806
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_468
+; CHECK-RV64-NEXT:    j .LBB61_966
+; CHECK-RV64-NEXT:  .LBB61_468: # %else1810
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_469
+; CHECK-RV64-NEXT:    j .LBB61_967
+; CHECK-RV64-NEXT:  .LBB61_469: # %else1814
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_470
+; CHECK-RV64-NEXT:    j .LBB61_968
+; CHECK-RV64-NEXT:  .LBB61_470: # %else1818
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_471
+; CHECK-RV64-NEXT:    j .LBB61_969
+; CHECK-RV64-NEXT:  .LBB61_471: # %else1822
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_472
+; CHECK-RV64-NEXT:    j .LBB61_970
+; CHECK-RV64-NEXT:  .LBB61_472: # %else1826
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    beqz a2, .LBB61_473
+; CHECK-RV64-NEXT:    j .LBB61_971
+; CHECK-RV64-NEXT:  .LBB61_473: # %else1830
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_474
+; CHECK-RV64-NEXT:    j .LBB61_972
+; CHECK-RV64-NEXT:  .LBB61_474: # %else1834
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_475
+; CHECK-RV64-NEXT:    j .LBB61_973
+; CHECK-RV64-NEXT:  .LBB61_475: # %else1838
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_476
+; CHECK-RV64-NEXT:    j .LBB61_974
+; CHECK-RV64-NEXT:  .LBB61_476: # %else1842
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_477
+; CHECK-RV64-NEXT:    j .LBB61_975
+; CHECK-RV64-NEXT:  .LBB61_477: # %else1846
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_478
+; CHECK-RV64-NEXT:    j .LBB61_976
+; CHECK-RV64-NEXT:  .LBB61_478: # %else1850
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_479
+; CHECK-RV64-NEXT:    j .LBB61_977
+; CHECK-RV64-NEXT:  .LBB61_479: # %else1854
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_480
+; CHECK-RV64-NEXT:    j .LBB61_978
+; CHECK-RV64-NEXT:  .LBB61_480: # %else1858
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_481
+; CHECK-RV64-NEXT:    j .LBB61_979
+; CHECK-RV64-NEXT:  .LBB61_481: # %else1862
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_482
+; CHECK-RV64-NEXT:    j .LBB61_980
+; CHECK-RV64-NEXT:  .LBB61_482: # %else1866
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_483
+; CHECK-RV64-NEXT:    j .LBB61_981
+; CHECK-RV64-NEXT:  .LBB61_483: # %else1870
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_484
+; CHECK-RV64-NEXT:    j .LBB61_982
+; CHECK-RV64-NEXT:  .LBB61_484: # %else1874
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_485
+; CHECK-RV64-NEXT:    j .LBB61_983
+; CHECK-RV64-NEXT:  .LBB61_485: # %else1878
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_486
+; CHECK-RV64-NEXT:    j .LBB61_984
+; CHECK-RV64-NEXT:  .LBB61_486: # %else1882
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_487
+; CHECK-RV64-NEXT:    j .LBB61_985
+; CHECK-RV64-NEXT:  .LBB61_487: # %else1886
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_488
+; CHECK-RV64-NEXT:    j .LBB61_986
+; CHECK-RV64-NEXT:  .LBB61_488: # %else1890
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_489
+; CHECK-RV64-NEXT:    j .LBB61_987
+; CHECK-RV64-NEXT:  .LBB61_489: # %else1894
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_490
+; CHECK-RV64-NEXT:    j .LBB61_988
+; CHECK-RV64-NEXT:  .LBB61_490: # %else1898
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_491
+; CHECK-RV64-NEXT:    j .LBB61_989
+; CHECK-RV64-NEXT:  .LBB61_491: # %else1902
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_492
+; CHECK-RV64-NEXT:    j .LBB61_990
+; CHECK-RV64-NEXT:  .LBB61_492: # %else1906
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_493
+; CHECK-RV64-NEXT:    j .LBB61_991
+; CHECK-RV64-NEXT:  .LBB61_493: # %else1910
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_494
+; CHECK-RV64-NEXT:    j .LBB61_992
+; CHECK-RV64-NEXT:  .LBB61_494: # %else1914
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_495
+; CHECK-RV64-NEXT:    j .LBB61_993
+; CHECK-RV64-NEXT:  .LBB61_495: # %else1918
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_496
+; CHECK-RV64-NEXT:    j .LBB61_994
+; CHECK-RV64-NEXT:  .LBB61_496: # %else1922
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_497
+; CHECK-RV64-NEXT:    j .LBB61_995
+; CHECK-RV64-NEXT:  .LBB61_497: # %else1926
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_498
+; CHECK-RV64-NEXT:    j .LBB61_996
+; CHECK-RV64-NEXT:  .LBB61_498: # %else1930
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_499
+; CHECK-RV64-NEXT:    j .LBB61_997
+; CHECK-RV64-NEXT:  .LBB61_499: # %else1934
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_500
+; CHECK-RV64-NEXT:    j .LBB61_998
+; CHECK-RV64-NEXT:  .LBB61_500: # %else1938
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_501
+; CHECK-RV64-NEXT:    j .LBB61_999
+; CHECK-RV64-NEXT:  .LBB61_501: # %else1942
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_502
+; CHECK-RV64-NEXT:    j .LBB61_1000
+; CHECK-RV64-NEXT:  .LBB61_502: # %else1946
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_503
+; CHECK-RV64-NEXT:    j .LBB61_1001
+; CHECK-RV64-NEXT:  .LBB61_503: # %else1950
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_504
+; CHECK-RV64-NEXT:    j .LBB61_1002
+; CHECK-RV64-NEXT:  .LBB61_504: # %else1954
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_505
+; CHECK-RV64-NEXT:    j .LBB61_1003
+; CHECK-RV64-NEXT:  .LBB61_505: # %else1958
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_506
+; CHECK-RV64-NEXT:    j .LBB61_1004
+; CHECK-RV64-NEXT:  .LBB61_506: # %else1962
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_507
+; CHECK-RV64-NEXT:    j .LBB61_1005
+; CHECK-RV64-NEXT:  .LBB61_507: # %else1966
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_508
+; CHECK-RV64-NEXT:    j .LBB61_1006
+; CHECK-RV64-NEXT:  .LBB61_508: # %else1970
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_509
+; CHECK-RV64-NEXT:    j .LBB61_1007
+; CHECK-RV64-NEXT:  .LBB61_509: # %else1974
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_510
+; CHECK-RV64-NEXT:    j .LBB61_1008
+; CHECK-RV64-NEXT:  .LBB61_510: # %else1978
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_511
+; CHECK-RV64-NEXT:    j .LBB61_1009
+; CHECK-RV64-NEXT:  .LBB61_511: # %else1982
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_512
+; CHECK-RV64-NEXT:    j .LBB61_1010
+; CHECK-RV64-NEXT:  .LBB61_512: # %else1986
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_513
+; CHECK-RV64-NEXT:    j .LBB61_1011
+; CHECK-RV64-NEXT:  .LBB61_513: # %else1990
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_514
+; CHECK-RV64-NEXT:    j .LBB61_1012
+; CHECK-RV64-NEXT:  .LBB61_514: # %else1994
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_515
+; CHECK-RV64-NEXT:    j .LBB61_1013
+; CHECK-RV64-NEXT:  .LBB61_515: # %else1998
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_516
+; CHECK-RV64-NEXT:    j .LBB61_1014
+; CHECK-RV64-NEXT:  .LBB61_516: # %else2002
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_517
+; CHECK-RV64-NEXT:    j .LBB61_1015
+; CHECK-RV64-NEXT:  .LBB61_517: # %else2006
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_518
+; CHECK-RV64-NEXT:    j .LBB61_1016
+; CHECK-RV64-NEXT:  .LBB61_518: # %else2010
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_519
+; CHECK-RV64-NEXT:    j .LBB61_1017
+; CHECK-RV64-NEXT:  .LBB61_519: # %else2014
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_520
+; CHECK-RV64-NEXT:    j .LBB61_1018
+; CHECK-RV64-NEXT:  .LBB61_520: # %else2018
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_521
+; CHECK-RV64-NEXT:    j .LBB61_1019
+; CHECK-RV64-NEXT:  .LBB61_521: # %else2022
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_522
+; CHECK-RV64-NEXT:    j .LBB61_1020
+; CHECK-RV64-NEXT:  .LBB61_522: # %else2026
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_523
+; CHECK-RV64-NEXT:    j .LBB61_1021
+; CHECK-RV64-NEXT:  .LBB61_523: # %else2030
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_524
+; CHECK-RV64-NEXT:    j .LBB61_1022
+; CHECK-RV64-NEXT:  .LBB61_524: # %else2034
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_525
+; CHECK-RV64-NEXT:    j .LBB61_1023
+; CHECK-RV64-NEXT:  .LBB61_525: # %else2038
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_526
+; CHECK-RV64-NEXT:    j .LBB61_1024
+; CHECK-RV64-NEXT:  .LBB61_526: # %else2042
+; CHECK-RV64-NEXT:    ret
+; CHECK-RV64-NEXT:  .LBB61_527: # %cond.load
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v8, a1
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_528
+; CHECK-RV64-NEXT:    j .LBB61_2
+; CHECK-RV64-NEXT:  .LBB61_528: # %cond.load1
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vsetivli zero, 2, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 1
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_529
+; CHECK-RV64-NEXT:    j .LBB61_3
+; CHECK-RV64-NEXT:  .LBB61_529: # %cond.load5
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 3, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 2
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_530
+; CHECK-RV64-NEXT:    j .LBB61_4
+; CHECK-RV64-NEXT:  .LBB61_530: # %cond.load9
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 4, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_531
+; CHECK-RV64-NEXT:    j .LBB61_5
+; CHECK-RV64-NEXT:  .LBB61_531: # %cond.load13
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 5, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 4
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_532
+; CHECK-RV64-NEXT:    j .LBB61_6
+; CHECK-RV64-NEXT:  .LBB61_532: # %cond.load17
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 6, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 5
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_533
+; CHECK-RV64-NEXT:    j .LBB61_7
+; CHECK-RV64-NEXT:  .LBB61_533: # %cond.load21
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 7, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 6
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_534
+; CHECK-RV64-NEXT:    j .LBB61_8
+; CHECK-RV64-NEXT:  .LBB61_534: # %cond.load25
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 8, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 7
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_535
+; CHECK-RV64-NEXT:    j .LBB61_9
+; CHECK-RV64-NEXT:  .LBB61_535: # %cond.load29
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 9, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 8
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_536
+; CHECK-RV64-NEXT:    j .LBB61_10
+; CHECK-RV64-NEXT:  .LBB61_536: # %cond.load33
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 10, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 9
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_537
+; CHECK-RV64-NEXT:    j .LBB61_11
+; CHECK-RV64-NEXT:  .LBB61_537: # %cond.load37
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 11, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 10
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_538
+; CHECK-RV64-NEXT:    j .LBB61_12
+; CHECK-RV64-NEXT:  .LBB61_538: # %cond.load41
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 12, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 11
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_539
+; CHECK-RV64-NEXT:    j .LBB61_13
+; CHECK-RV64-NEXT:  .LBB61_539: # %cond.load45
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 13, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 12
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_540
+; CHECK-RV64-NEXT:    j .LBB61_14
+; CHECK-RV64-NEXT:  .LBB61_540: # %cond.load49
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 14, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 13
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_541
+; CHECK-RV64-NEXT:    j .LBB61_15
+; CHECK-RV64-NEXT:  .LBB61_541: # %cond.load53
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 15, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 14
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_542
+; CHECK-RV64-NEXT:    j .LBB61_16
+; CHECK-RV64-NEXT:  .LBB61_542: # %cond.load57
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 16, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 15
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_543
+; CHECK-RV64-NEXT:    j .LBB61_17
+; CHECK-RV64-NEXT:  .LBB61_543: # %cond.load61
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 17, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 16
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_544
+; CHECK-RV64-NEXT:    j .LBB61_18
+; CHECK-RV64-NEXT:  .LBB61_544: # %cond.load65
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 18, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 17
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_545
+; CHECK-RV64-NEXT:    j .LBB61_19
+; CHECK-RV64-NEXT:  .LBB61_545: # %cond.load69
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 19, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 18
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_546
+; CHECK-RV64-NEXT:    j .LBB61_20
+; CHECK-RV64-NEXT:  .LBB61_546: # %cond.load73
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 20, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 19
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_547
+; CHECK-RV64-NEXT:    j .LBB61_21
+; CHECK-RV64-NEXT:  .LBB61_547: # %cond.load77
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 21, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 20
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_548
+; CHECK-RV64-NEXT:    j .LBB61_22
+; CHECK-RV64-NEXT:  .LBB61_548: # %cond.load81
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 22, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 21
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_549
+; CHECK-RV64-NEXT:    j .LBB61_23
+; CHECK-RV64-NEXT:  .LBB61_549: # %cond.load85
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 23, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 22
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_550
+; CHECK-RV64-NEXT:    j .LBB61_24
+; CHECK-RV64-NEXT:  .LBB61_550: # %cond.load89
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 24, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 23
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_551
+; CHECK-RV64-NEXT:    j .LBB61_25
+; CHECK-RV64-NEXT:  .LBB61_551: # %cond.load93
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 25, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 24
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_552
+; CHECK-RV64-NEXT:    j .LBB61_26
+; CHECK-RV64-NEXT:  .LBB61_552: # %cond.load97
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 26, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 25
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_553
+; CHECK-RV64-NEXT:    j .LBB61_27
+; CHECK-RV64-NEXT:  .LBB61_553: # %cond.load101
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 27, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 26
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_554
+; CHECK-RV64-NEXT:    j .LBB61_28
+; CHECK-RV64-NEXT:  .LBB61_554: # %cond.load105
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 28, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 27
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_555
+; CHECK-RV64-NEXT:    j .LBB61_29
+; CHECK-RV64-NEXT:  .LBB61_555: # %cond.load109
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 29, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 28
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_556
+; CHECK-RV64-NEXT:    j .LBB61_30
+; CHECK-RV64-NEXT:  .LBB61_556: # %cond.load113
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 30, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 29
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_557
+; CHECK-RV64-NEXT:    j .LBB61_31
+; CHECK-RV64-NEXT:  .LBB61_557: # %cond.load117
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vsetivli zero, 31, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    vslideup.vi v8, v16, 30
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_558
+; CHECK-RV64-NEXT:    j .LBB61_32
+; CHECK-RV64-NEXT:  .LBB61_558: # %cond.load121
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vi v8, v24, 31
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_559
+; CHECK-RV64-NEXT:    j .LBB61_33
+; CHECK-RV64-NEXT:  .LBB61_559: # %cond.load125
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 33
+; CHECK-RV64-NEXT:    li a3, 32
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_560
+; CHECK-RV64-NEXT:    j .LBB61_34
+; CHECK-RV64-NEXT:  .LBB61_560: # %cond.load129
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 34
+; CHECK-RV64-NEXT:    li a3, 33
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_561
+; CHECK-RV64-NEXT:    j .LBB61_35
+; CHECK-RV64-NEXT:  .LBB61_561: # %cond.load133
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 35
+; CHECK-RV64-NEXT:    li a3, 34
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_562
+; CHECK-RV64-NEXT:    j .LBB61_36
+; CHECK-RV64-NEXT:  .LBB61_562: # %cond.load137
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 36
+; CHECK-RV64-NEXT:    li a3, 35
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_563
+; CHECK-RV64-NEXT:    j .LBB61_37
+; CHECK-RV64-NEXT:  .LBB61_563: # %cond.load141
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 37
+; CHECK-RV64-NEXT:    li a3, 36
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_564
+; CHECK-RV64-NEXT:    j .LBB61_38
+; CHECK-RV64-NEXT:  .LBB61_564: # %cond.load145
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 38
+; CHECK-RV64-NEXT:    li a3, 37
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_565
+; CHECK-RV64-NEXT:    j .LBB61_39
+; CHECK-RV64-NEXT:  .LBB61_565: # %cond.load149
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 39
+; CHECK-RV64-NEXT:    li a3, 38
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_566
+; CHECK-RV64-NEXT:    j .LBB61_40
+; CHECK-RV64-NEXT:  .LBB61_566: # %cond.load153
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 40
+; CHECK-RV64-NEXT:    li a3, 39
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_567
+; CHECK-RV64-NEXT:    j .LBB61_41
+; CHECK-RV64-NEXT:  .LBB61_567: # %cond.load157
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 41
+; CHECK-RV64-NEXT:    li a3, 40
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_568
+; CHECK-RV64-NEXT:    j .LBB61_42
+; CHECK-RV64-NEXT:  .LBB61_568: # %cond.load161
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 42
+; CHECK-RV64-NEXT:    li a3, 41
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_569
+; CHECK-RV64-NEXT:    j .LBB61_43
+; CHECK-RV64-NEXT:  .LBB61_569: # %cond.load165
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 43
+; CHECK-RV64-NEXT:    li a3, 42
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_570
+; CHECK-RV64-NEXT:    j .LBB61_44
+; CHECK-RV64-NEXT:  .LBB61_570: # %cond.load169
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 44
+; CHECK-RV64-NEXT:    li a3, 43
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_571
+; CHECK-RV64-NEXT:    j .LBB61_45
+; CHECK-RV64-NEXT:  .LBB61_571: # %cond.load173
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 45
+; CHECK-RV64-NEXT:    li a3, 44
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_572
+; CHECK-RV64-NEXT:    j .LBB61_46
+; CHECK-RV64-NEXT:  .LBB61_572: # %cond.load177
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 46
+; CHECK-RV64-NEXT:    li a3, 45
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_573
+; CHECK-RV64-NEXT:    j .LBB61_47
+; CHECK-RV64-NEXT:  .LBB61_573: # %cond.load181
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 47
+; CHECK-RV64-NEXT:    li a3, 46
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_574
+; CHECK-RV64-NEXT:    j .LBB61_48
+; CHECK-RV64-NEXT:  .LBB61_574: # %cond.load185
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 48
+; CHECK-RV64-NEXT:    li a3, 47
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_575
+; CHECK-RV64-NEXT:    j .LBB61_49
+; CHECK-RV64-NEXT:  .LBB61_575: # %cond.load189
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 49
+; CHECK-RV64-NEXT:    li a3, 48
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_576
+; CHECK-RV64-NEXT:    j .LBB61_50
+; CHECK-RV64-NEXT:  .LBB61_576: # %cond.load193
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 50
+; CHECK-RV64-NEXT:    li a3, 49
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_577
+; CHECK-RV64-NEXT:    j .LBB61_51
+; CHECK-RV64-NEXT:  .LBB61_577: # %cond.load197
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 51
+; CHECK-RV64-NEXT:    li a3, 50
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_578
+; CHECK-RV64-NEXT:    j .LBB61_52
+; CHECK-RV64-NEXT:  .LBB61_578: # %cond.load201
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 52
+; CHECK-RV64-NEXT:    li a3, 51
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_579
+; CHECK-RV64-NEXT:    j .LBB61_53
+; CHECK-RV64-NEXT:  .LBB61_579: # %cond.load205
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 53
+; CHECK-RV64-NEXT:    li a3, 52
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_580
+; CHECK-RV64-NEXT:    j .LBB61_54
+; CHECK-RV64-NEXT:  .LBB61_580: # %cond.load209
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 54
+; CHECK-RV64-NEXT:    li a3, 53
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_581
+; CHECK-RV64-NEXT:    j .LBB61_55
+; CHECK-RV64-NEXT:  .LBB61_581: # %cond.load213
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 55
+; CHECK-RV64-NEXT:    li a3, 54
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_582
+; CHECK-RV64-NEXT:    j .LBB61_56
+; CHECK-RV64-NEXT:  .LBB61_582: # %cond.load217
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 56
+; CHECK-RV64-NEXT:    li a3, 55
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_583
+; CHECK-RV64-NEXT:    j .LBB61_57
+; CHECK-RV64-NEXT:  .LBB61_583: # %cond.load221
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 57
+; CHECK-RV64-NEXT:    li a3, 56
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_584
+; CHECK-RV64-NEXT:    j .LBB61_58
+; CHECK-RV64-NEXT:  .LBB61_584: # %cond.load225
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 58
+; CHECK-RV64-NEXT:    li a3, 57
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_585
+; CHECK-RV64-NEXT:    j .LBB61_59
+; CHECK-RV64-NEXT:  .LBB61_585: # %cond.load229
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 59
+; CHECK-RV64-NEXT:    li a3, 58
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_586
+; CHECK-RV64-NEXT:    j .LBB61_60
+; CHECK-RV64-NEXT:  .LBB61_586: # %cond.load233
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 60
+; CHECK-RV64-NEXT:    li a3, 59
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_587
+; CHECK-RV64-NEXT:    j .LBB61_61
+; CHECK-RV64-NEXT:  .LBB61_587: # %cond.load237
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 61
+; CHECK-RV64-NEXT:    li a3, 60
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1025
+; CHECK-RV64-NEXT:    j .LBB61_62
+; CHECK-RV64-NEXT:  .LBB61_1025: # %cond.load237
+; CHECK-RV64-NEXT:    j .LBB61_63
+; CHECK-RV64-NEXT:  .LBB61_588: # %cond.load249
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 64
+; CHECK-RV64-NEXT:    li a3, 63
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m1, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv1r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_589
+; CHECK-RV64-NEXT:    j .LBB61_67
+; CHECK-RV64-NEXT:  .LBB61_589: # %cond.load253
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 65
+; CHECK-RV64-NEXT:    li a3, 64
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_590
+; CHECK-RV64-NEXT:    j .LBB61_68
+; CHECK-RV64-NEXT:  .LBB61_590: # %cond.load257
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 66
+; CHECK-RV64-NEXT:    li a3, 65
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_591
+; CHECK-RV64-NEXT:    j .LBB61_69
+; CHECK-RV64-NEXT:  .LBB61_591: # %cond.load261
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 67
+; CHECK-RV64-NEXT:    li a3, 66
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_592
+; CHECK-RV64-NEXT:    j .LBB61_70
+; CHECK-RV64-NEXT:  .LBB61_592: # %cond.load265
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 68
+; CHECK-RV64-NEXT:    li a3, 67
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_593
+; CHECK-RV64-NEXT:    j .LBB61_71
+; CHECK-RV64-NEXT:  .LBB61_593: # %cond.load269
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 69
+; CHECK-RV64-NEXT:    li a3, 68
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_594
+; CHECK-RV64-NEXT:    j .LBB61_72
+; CHECK-RV64-NEXT:  .LBB61_594: # %cond.load273
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 70
+; CHECK-RV64-NEXT:    li a3, 69
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_595
+; CHECK-RV64-NEXT:    j .LBB61_73
+; CHECK-RV64-NEXT:  .LBB61_595: # %cond.load277
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 71
+; CHECK-RV64-NEXT:    li a3, 70
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_596
+; CHECK-RV64-NEXT:    j .LBB61_74
+; CHECK-RV64-NEXT:  .LBB61_596: # %cond.load281
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 72
+; CHECK-RV64-NEXT:    li a3, 71
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_597
+; CHECK-RV64-NEXT:    j .LBB61_75
+; CHECK-RV64-NEXT:  .LBB61_597: # %cond.load285
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 73
+; CHECK-RV64-NEXT:    li a3, 72
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_598
+; CHECK-RV64-NEXT:    j .LBB61_76
+; CHECK-RV64-NEXT:  .LBB61_598: # %cond.load289
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 74
+; CHECK-RV64-NEXT:    li a3, 73
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_599
+; CHECK-RV64-NEXT:    j .LBB61_77
+; CHECK-RV64-NEXT:  .LBB61_599: # %cond.load293
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 75
+; CHECK-RV64-NEXT:    li a3, 74
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_600
+; CHECK-RV64-NEXT:    j .LBB61_78
+; CHECK-RV64-NEXT:  .LBB61_600: # %cond.load297
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 76
+; CHECK-RV64-NEXT:    li a3, 75
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_601
+; CHECK-RV64-NEXT:    j .LBB61_79
+; CHECK-RV64-NEXT:  .LBB61_601: # %cond.load301
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 77
+; CHECK-RV64-NEXT:    li a3, 76
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_602
+; CHECK-RV64-NEXT:    j .LBB61_80
+; CHECK-RV64-NEXT:  .LBB61_602: # %cond.load305
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 78
+; CHECK-RV64-NEXT:    li a3, 77
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_603
+; CHECK-RV64-NEXT:    j .LBB61_81
+; CHECK-RV64-NEXT:  .LBB61_603: # %cond.load309
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 79
+; CHECK-RV64-NEXT:    li a3, 78
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_604
+; CHECK-RV64-NEXT:    j .LBB61_82
+; CHECK-RV64-NEXT:  .LBB61_604: # %cond.load313
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 80
+; CHECK-RV64-NEXT:    li a3, 79
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_605
+; CHECK-RV64-NEXT:    j .LBB61_83
+; CHECK-RV64-NEXT:  .LBB61_605: # %cond.load317
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 81
+; CHECK-RV64-NEXT:    li a3, 80
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_606
+; CHECK-RV64-NEXT:    j .LBB61_84
+; CHECK-RV64-NEXT:  .LBB61_606: # %cond.load321
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 82
+; CHECK-RV64-NEXT:    li a3, 81
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_607
+; CHECK-RV64-NEXT:    j .LBB61_85
+; CHECK-RV64-NEXT:  .LBB61_607: # %cond.load325
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 83
+; CHECK-RV64-NEXT:    li a3, 82
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_608
+; CHECK-RV64-NEXT:    j .LBB61_86
+; CHECK-RV64-NEXT:  .LBB61_608: # %cond.load329
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 84
+; CHECK-RV64-NEXT:    li a3, 83
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_609
+; CHECK-RV64-NEXT:    j .LBB61_87
+; CHECK-RV64-NEXT:  .LBB61_609: # %cond.load333
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 85
+; CHECK-RV64-NEXT:    li a3, 84
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_610
+; CHECK-RV64-NEXT:    j .LBB61_88
+; CHECK-RV64-NEXT:  .LBB61_610: # %cond.load337
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 86
+; CHECK-RV64-NEXT:    li a3, 85
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_611
+; CHECK-RV64-NEXT:    j .LBB61_89
+; CHECK-RV64-NEXT:  .LBB61_611: # %cond.load341
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 87
+; CHECK-RV64-NEXT:    li a3, 86
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_612
+; CHECK-RV64-NEXT:    j .LBB61_90
+; CHECK-RV64-NEXT:  .LBB61_612: # %cond.load345
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 88
+; CHECK-RV64-NEXT:    li a3, 87
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_613
+; CHECK-RV64-NEXT:    j .LBB61_91
+; CHECK-RV64-NEXT:  .LBB61_613: # %cond.load349
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 89
+; CHECK-RV64-NEXT:    li a3, 88
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_614
+; CHECK-RV64-NEXT:    j .LBB61_92
+; CHECK-RV64-NEXT:  .LBB61_614: # %cond.load353
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 90
+; CHECK-RV64-NEXT:    li a3, 89
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_615
+; CHECK-RV64-NEXT:    j .LBB61_93
+; CHECK-RV64-NEXT:  .LBB61_615: # %cond.load357
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 91
+; CHECK-RV64-NEXT:    li a3, 90
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_616
+; CHECK-RV64-NEXT:    j .LBB61_94
+; CHECK-RV64-NEXT:  .LBB61_616: # %cond.load361
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 92
+; CHECK-RV64-NEXT:    li a3, 91
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_617
+; CHECK-RV64-NEXT:    j .LBB61_95
+; CHECK-RV64-NEXT:  .LBB61_617: # %cond.load365
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 93
+; CHECK-RV64-NEXT:    li a3, 92
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_618
+; CHECK-RV64-NEXT:    j .LBB61_96
+; CHECK-RV64-NEXT:  .LBB61_618: # %cond.load369
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 94
+; CHECK-RV64-NEXT:    li a3, 93
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_619
+; CHECK-RV64-NEXT:    j .LBB61_97
+; CHECK-RV64-NEXT:  .LBB61_619: # %cond.load373
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 95
+; CHECK-RV64-NEXT:    li a3, 94
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_620
+; CHECK-RV64-NEXT:    j .LBB61_98
+; CHECK-RV64-NEXT:  .LBB61_620: # %cond.load377
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 96
+; CHECK-RV64-NEXT:    li a3, 95
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_621
+; CHECK-RV64-NEXT:    j .LBB61_99
+; CHECK-RV64-NEXT:  .LBB61_621: # %cond.load381
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 97
+; CHECK-RV64-NEXT:    li a3, 96
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_622
+; CHECK-RV64-NEXT:    j .LBB61_100
+; CHECK-RV64-NEXT:  .LBB61_622: # %cond.load385
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 98
+; CHECK-RV64-NEXT:    li a3, 97
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_623
+; CHECK-RV64-NEXT:    j .LBB61_101
+; CHECK-RV64-NEXT:  .LBB61_623: # %cond.load389
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 99
+; CHECK-RV64-NEXT:    li a3, 98
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_624
+; CHECK-RV64-NEXT:    j .LBB61_102
+; CHECK-RV64-NEXT:  .LBB61_624: # %cond.load393
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 100
+; CHECK-RV64-NEXT:    li a3, 99
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_625
+; CHECK-RV64-NEXT:    j .LBB61_103
+; CHECK-RV64-NEXT:  .LBB61_625: # %cond.load397
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 101
+; CHECK-RV64-NEXT:    li a3, 100
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_626
+; CHECK-RV64-NEXT:    j .LBB61_104
+; CHECK-RV64-NEXT:  .LBB61_626: # %cond.load401
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 102
+; CHECK-RV64-NEXT:    li a3, 101
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_627
+; CHECK-RV64-NEXT:    j .LBB61_105
+; CHECK-RV64-NEXT:  .LBB61_627: # %cond.load405
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 103
+; CHECK-RV64-NEXT:    li a3, 102
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_628
+; CHECK-RV64-NEXT:    j .LBB61_106
+; CHECK-RV64-NEXT:  .LBB61_628: # %cond.load409
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 104
+; CHECK-RV64-NEXT:    li a3, 103
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_629
+; CHECK-RV64-NEXT:    j .LBB61_107
+; CHECK-RV64-NEXT:  .LBB61_629: # %cond.load413
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 105
+; CHECK-RV64-NEXT:    li a3, 104
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_630
+; CHECK-RV64-NEXT:    j .LBB61_108
+; CHECK-RV64-NEXT:  .LBB61_630: # %cond.load417
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 106
+; CHECK-RV64-NEXT:    li a3, 105
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_631
+; CHECK-RV64-NEXT:    j .LBB61_109
+; CHECK-RV64-NEXT:  .LBB61_631: # %cond.load421
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 107
+; CHECK-RV64-NEXT:    li a3, 106
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_632
+; CHECK-RV64-NEXT:    j .LBB61_110
+; CHECK-RV64-NEXT:  .LBB61_632: # %cond.load425
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 108
+; CHECK-RV64-NEXT:    li a3, 107
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_633
+; CHECK-RV64-NEXT:    j .LBB61_111
+; CHECK-RV64-NEXT:  .LBB61_633: # %cond.load429
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 109
+; CHECK-RV64-NEXT:    li a3, 108
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_634
+; CHECK-RV64-NEXT:    j .LBB61_112
+; CHECK-RV64-NEXT:  .LBB61_634: # %cond.load433
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 110
+; CHECK-RV64-NEXT:    li a3, 109
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_635
+; CHECK-RV64-NEXT:    j .LBB61_113
+; CHECK-RV64-NEXT:  .LBB61_635: # %cond.load437
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 111
+; CHECK-RV64-NEXT:    li a3, 110
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_636
+; CHECK-RV64-NEXT:    j .LBB61_114
+; CHECK-RV64-NEXT:  .LBB61_636: # %cond.load441
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 112
+; CHECK-RV64-NEXT:    li a3, 111
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_637
+; CHECK-RV64-NEXT:    j .LBB61_115
+; CHECK-RV64-NEXT:  .LBB61_637: # %cond.load445
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 113
+; CHECK-RV64-NEXT:    li a3, 112
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_638
+; CHECK-RV64-NEXT:    j .LBB61_116
+; CHECK-RV64-NEXT:  .LBB61_638: # %cond.load449
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 114
+; CHECK-RV64-NEXT:    li a3, 113
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_639
+; CHECK-RV64-NEXT:    j .LBB61_117
+; CHECK-RV64-NEXT:  .LBB61_639: # %cond.load453
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 115
+; CHECK-RV64-NEXT:    li a3, 114
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_640
+; CHECK-RV64-NEXT:    j .LBB61_118
+; CHECK-RV64-NEXT:  .LBB61_640: # %cond.load457
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 116
+; CHECK-RV64-NEXT:    li a3, 115
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_641
+; CHECK-RV64-NEXT:    j .LBB61_119
+; CHECK-RV64-NEXT:  .LBB61_641: # %cond.load461
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 117
+; CHECK-RV64-NEXT:    li a3, 116
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_642
+; CHECK-RV64-NEXT:    j .LBB61_120
+; CHECK-RV64-NEXT:  .LBB61_642: # %cond.load465
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 118
+; CHECK-RV64-NEXT:    li a3, 117
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_643
+; CHECK-RV64-NEXT:    j .LBB61_121
+; CHECK-RV64-NEXT:  .LBB61_643: # %cond.load469
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 119
+; CHECK-RV64-NEXT:    li a3, 118
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_644
+; CHECK-RV64-NEXT:    j .LBB61_122
+; CHECK-RV64-NEXT:  .LBB61_644: # %cond.load473
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 120
+; CHECK-RV64-NEXT:    li a3, 119
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_645
+; CHECK-RV64-NEXT:    j .LBB61_123
+; CHECK-RV64-NEXT:  .LBB61_645: # %cond.load477
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 121
+; CHECK-RV64-NEXT:    li a3, 120
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_646
+; CHECK-RV64-NEXT:    j .LBB61_124
+; CHECK-RV64-NEXT:  .LBB61_646: # %cond.load481
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 122
+; CHECK-RV64-NEXT:    li a3, 121
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_647
+; CHECK-RV64-NEXT:    j .LBB61_125
+; CHECK-RV64-NEXT:  .LBB61_647: # %cond.load485
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 123
+; CHECK-RV64-NEXT:    li a3, 122
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_648
+; CHECK-RV64-NEXT:    j .LBB61_126
+; CHECK-RV64-NEXT:  .LBB61_648: # %cond.load489
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 124
+; CHECK-RV64-NEXT:    li a3, 123
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_649
+; CHECK-RV64-NEXT:    j .LBB61_127
+; CHECK-RV64-NEXT:  .LBB61_649: # %cond.load493
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v24, a2
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a2, 125
+; CHECK-RV64-NEXT:    li a3, 124
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1026
+; CHECK-RV64-NEXT:    j .LBB61_128
+; CHECK-RV64-NEXT:  .LBB61_1026: # %cond.load493
+; CHECK-RV64-NEXT:    j .LBB61_129
+; CHECK-RV64-NEXT:  .LBB61_650: # %cond.load505
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v24, a1
+; CHECK-RV64-NEXT:    vmv8r.v v16, v8
+; CHECK-RV64-NEXT:    li a1, 128
+; CHECK-RV64-NEXT:    li a3, 127
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m2, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v24, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv2r.v v16, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v16
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_651
+; CHECK-RV64-NEXT:    j .LBB61_133
+; CHECK-RV64-NEXT:  .LBB61_651: # %cond.load509
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 129
+; CHECK-RV64-NEXT:    li a3, 128
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_652
+; CHECK-RV64-NEXT:    j .LBB61_134
+; CHECK-RV64-NEXT:  .LBB61_652: # %cond.load513
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 130
+; CHECK-RV64-NEXT:    li a3, 129
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_653
+; CHECK-RV64-NEXT:    j .LBB61_135
+; CHECK-RV64-NEXT:  .LBB61_653: # %cond.load517
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 131
+; CHECK-RV64-NEXT:    li a3, 130
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_654
+; CHECK-RV64-NEXT:    j .LBB61_136
+; CHECK-RV64-NEXT:  .LBB61_654: # %cond.load521
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 132
+; CHECK-RV64-NEXT:    li a3, 131
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_655
+; CHECK-RV64-NEXT:    j .LBB61_137
+; CHECK-RV64-NEXT:  .LBB61_655: # %cond.load525
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 133
+; CHECK-RV64-NEXT:    li a3, 132
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_656
+; CHECK-RV64-NEXT:    j .LBB61_138
+; CHECK-RV64-NEXT:  .LBB61_656: # %cond.load529
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 134
+; CHECK-RV64-NEXT:    li a3, 133
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_657
+; CHECK-RV64-NEXT:    j .LBB61_139
+; CHECK-RV64-NEXT:  .LBB61_657: # %cond.load533
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 135
+; CHECK-RV64-NEXT:    li a3, 134
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_658
+; CHECK-RV64-NEXT:    j .LBB61_140
+; CHECK-RV64-NEXT:  .LBB61_658: # %cond.load537
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 136
+; CHECK-RV64-NEXT:    li a3, 135
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_659
+; CHECK-RV64-NEXT:    j .LBB61_141
+; CHECK-RV64-NEXT:  .LBB61_659: # %cond.load541
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 137
+; CHECK-RV64-NEXT:    li a3, 136
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_660
+; CHECK-RV64-NEXT:    j .LBB61_142
+; CHECK-RV64-NEXT:  .LBB61_660: # %cond.load545
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 138
+; CHECK-RV64-NEXT:    li a3, 137
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_661
+; CHECK-RV64-NEXT:    j .LBB61_143
+; CHECK-RV64-NEXT:  .LBB61_661: # %cond.load549
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 139
+; CHECK-RV64-NEXT:    li a3, 138
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_662
+; CHECK-RV64-NEXT:    j .LBB61_144
+; CHECK-RV64-NEXT:  .LBB61_662: # %cond.load553
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 140
+; CHECK-RV64-NEXT:    li a3, 139
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_663
+; CHECK-RV64-NEXT:    j .LBB61_145
+; CHECK-RV64-NEXT:  .LBB61_663: # %cond.load557
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 141
+; CHECK-RV64-NEXT:    li a3, 140
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_664
+; CHECK-RV64-NEXT:    j .LBB61_146
+; CHECK-RV64-NEXT:  .LBB61_664: # %cond.load561
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 142
+; CHECK-RV64-NEXT:    li a3, 141
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_665
+; CHECK-RV64-NEXT:    j .LBB61_147
+; CHECK-RV64-NEXT:  .LBB61_665: # %cond.load565
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 143
+; CHECK-RV64-NEXT:    li a3, 142
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_666
+; CHECK-RV64-NEXT:    j .LBB61_148
+; CHECK-RV64-NEXT:  .LBB61_666: # %cond.load569
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 144
+; CHECK-RV64-NEXT:    li a3, 143
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_667
+; CHECK-RV64-NEXT:    j .LBB61_149
+; CHECK-RV64-NEXT:  .LBB61_667: # %cond.load573
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 145
+; CHECK-RV64-NEXT:    li a3, 144
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_668
+; CHECK-RV64-NEXT:    j .LBB61_150
+; CHECK-RV64-NEXT:  .LBB61_668: # %cond.load577
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 146
+; CHECK-RV64-NEXT:    li a3, 145
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_669
+; CHECK-RV64-NEXT:    j .LBB61_151
+; CHECK-RV64-NEXT:  .LBB61_669: # %cond.load581
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 147
+; CHECK-RV64-NEXT:    li a3, 146
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_670
+; CHECK-RV64-NEXT:    j .LBB61_152
+; CHECK-RV64-NEXT:  .LBB61_670: # %cond.load585
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 148
+; CHECK-RV64-NEXT:    li a3, 147
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_671
+; CHECK-RV64-NEXT:    j .LBB61_153
+; CHECK-RV64-NEXT:  .LBB61_671: # %cond.load589
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 149
+; CHECK-RV64-NEXT:    li a3, 148
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_672
+; CHECK-RV64-NEXT:    j .LBB61_154
+; CHECK-RV64-NEXT:  .LBB61_672: # %cond.load593
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 150
+; CHECK-RV64-NEXT:    li a3, 149
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_673
+; CHECK-RV64-NEXT:    j .LBB61_155
+; CHECK-RV64-NEXT:  .LBB61_673: # %cond.load597
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 151
+; CHECK-RV64-NEXT:    li a3, 150
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_674
+; CHECK-RV64-NEXT:    j .LBB61_156
+; CHECK-RV64-NEXT:  .LBB61_674: # %cond.load601
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 152
+; CHECK-RV64-NEXT:    li a3, 151
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_675
+; CHECK-RV64-NEXT:    j .LBB61_157
+; CHECK-RV64-NEXT:  .LBB61_675: # %cond.load605
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 153
+; CHECK-RV64-NEXT:    li a3, 152
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_676
+; CHECK-RV64-NEXT:    j .LBB61_158
+; CHECK-RV64-NEXT:  .LBB61_676: # %cond.load609
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 154
+; CHECK-RV64-NEXT:    li a3, 153
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_677
+; CHECK-RV64-NEXT:    j .LBB61_159
+; CHECK-RV64-NEXT:  .LBB61_677: # %cond.load613
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 155
+; CHECK-RV64-NEXT:    li a3, 154
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_678
+; CHECK-RV64-NEXT:    j .LBB61_160
+; CHECK-RV64-NEXT:  .LBB61_678: # %cond.load617
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 156
+; CHECK-RV64-NEXT:    li a3, 155
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_679
+; CHECK-RV64-NEXT:    j .LBB61_161
+; CHECK-RV64-NEXT:  .LBB61_679: # %cond.load621
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 157
+; CHECK-RV64-NEXT:    li a3, 156
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_680
+; CHECK-RV64-NEXT:    j .LBB61_162
+; CHECK-RV64-NEXT:  .LBB61_680: # %cond.load625
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 158
+; CHECK-RV64-NEXT:    li a3, 157
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_681
+; CHECK-RV64-NEXT:    j .LBB61_163
+; CHECK-RV64-NEXT:  .LBB61_681: # %cond.load629
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 159
+; CHECK-RV64-NEXT:    li a3, 158
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_682
+; CHECK-RV64-NEXT:    j .LBB61_164
+; CHECK-RV64-NEXT:  .LBB61_682: # %cond.load633
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 160
+; CHECK-RV64-NEXT:    li a3, 159
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_683
+; CHECK-RV64-NEXT:    j .LBB61_165
+; CHECK-RV64-NEXT:  .LBB61_683: # %cond.load637
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 161
+; CHECK-RV64-NEXT:    li a3, 160
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_684
+; CHECK-RV64-NEXT:    j .LBB61_166
+; CHECK-RV64-NEXT:  .LBB61_684: # %cond.load641
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 162
+; CHECK-RV64-NEXT:    li a3, 161
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_685
+; CHECK-RV64-NEXT:    j .LBB61_167
+; CHECK-RV64-NEXT:  .LBB61_685: # %cond.load645
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 163
+; CHECK-RV64-NEXT:    li a3, 162
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_686
+; CHECK-RV64-NEXT:    j .LBB61_168
+; CHECK-RV64-NEXT:  .LBB61_686: # %cond.load649
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 164
+; CHECK-RV64-NEXT:    li a3, 163
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_687
+; CHECK-RV64-NEXT:    j .LBB61_169
+; CHECK-RV64-NEXT:  .LBB61_687: # %cond.load653
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 165
+; CHECK-RV64-NEXT:    li a3, 164
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_688
+; CHECK-RV64-NEXT:    j .LBB61_170
+; CHECK-RV64-NEXT:  .LBB61_688: # %cond.load657
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 166
+; CHECK-RV64-NEXT:    li a3, 165
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_689
+; CHECK-RV64-NEXT:    j .LBB61_171
+; CHECK-RV64-NEXT:  .LBB61_689: # %cond.load661
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 167
+; CHECK-RV64-NEXT:    li a3, 166
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_690
+; CHECK-RV64-NEXT:    j .LBB61_172
+; CHECK-RV64-NEXT:  .LBB61_690: # %cond.load665
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 168
+; CHECK-RV64-NEXT:    li a3, 167
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_691
+; CHECK-RV64-NEXT:    j .LBB61_173
+; CHECK-RV64-NEXT:  .LBB61_691: # %cond.load669
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 169
+; CHECK-RV64-NEXT:    li a3, 168
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_692
+; CHECK-RV64-NEXT:    j .LBB61_174
+; CHECK-RV64-NEXT:  .LBB61_692: # %cond.load673
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 170
+; CHECK-RV64-NEXT:    li a3, 169
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_693
+; CHECK-RV64-NEXT:    j .LBB61_175
+; CHECK-RV64-NEXT:  .LBB61_693: # %cond.load677
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 171
+; CHECK-RV64-NEXT:    li a3, 170
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_694
+; CHECK-RV64-NEXT:    j .LBB61_176
+; CHECK-RV64-NEXT:  .LBB61_694: # %cond.load681
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 172
+; CHECK-RV64-NEXT:    li a3, 171
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_695
+; CHECK-RV64-NEXT:    j .LBB61_177
+; CHECK-RV64-NEXT:  .LBB61_695: # %cond.load685
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 173
+; CHECK-RV64-NEXT:    li a3, 172
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_696
+; CHECK-RV64-NEXT:    j .LBB61_178
+; CHECK-RV64-NEXT:  .LBB61_696: # %cond.load689
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 174
+; CHECK-RV64-NEXT:    li a3, 173
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_697
+; CHECK-RV64-NEXT:    j .LBB61_179
+; CHECK-RV64-NEXT:  .LBB61_697: # %cond.load693
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 175
+; CHECK-RV64-NEXT:    li a3, 174
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_698
+; CHECK-RV64-NEXT:    j .LBB61_180
+; CHECK-RV64-NEXT:  .LBB61_698: # %cond.load697
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 176
+; CHECK-RV64-NEXT:    li a3, 175
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_699
+; CHECK-RV64-NEXT:    j .LBB61_181
+; CHECK-RV64-NEXT:  .LBB61_699: # %cond.load701
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 177
+; CHECK-RV64-NEXT:    li a3, 176
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_700
+; CHECK-RV64-NEXT:    j .LBB61_182
+; CHECK-RV64-NEXT:  .LBB61_700: # %cond.load705
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 178
+; CHECK-RV64-NEXT:    li a3, 177
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_701
+; CHECK-RV64-NEXT:    j .LBB61_183
+; CHECK-RV64-NEXT:  .LBB61_701: # %cond.load709
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 179
+; CHECK-RV64-NEXT:    li a3, 178
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_702
+; CHECK-RV64-NEXT:    j .LBB61_184
+; CHECK-RV64-NEXT:  .LBB61_702: # %cond.load713
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 180
+; CHECK-RV64-NEXT:    li a3, 179
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_703
+; CHECK-RV64-NEXT:    j .LBB61_185
+; CHECK-RV64-NEXT:  .LBB61_703: # %cond.load717
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 181
+; CHECK-RV64-NEXT:    li a3, 180
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_704
+; CHECK-RV64-NEXT:    j .LBB61_186
+; CHECK-RV64-NEXT:  .LBB61_704: # %cond.load721
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 182
+; CHECK-RV64-NEXT:    li a3, 181
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_705
+; CHECK-RV64-NEXT:    j .LBB61_187
+; CHECK-RV64-NEXT:  .LBB61_705: # %cond.load725
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 183
+; CHECK-RV64-NEXT:    li a3, 182
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_706
+; CHECK-RV64-NEXT:    j .LBB61_188
+; CHECK-RV64-NEXT:  .LBB61_706: # %cond.load729
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 184
+; CHECK-RV64-NEXT:    li a3, 183
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_707
+; CHECK-RV64-NEXT:    j .LBB61_189
+; CHECK-RV64-NEXT:  .LBB61_707: # %cond.load733
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 185
+; CHECK-RV64-NEXT:    li a3, 184
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_708
+; CHECK-RV64-NEXT:    j .LBB61_190
+; CHECK-RV64-NEXT:  .LBB61_708: # %cond.load737
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 186
+; CHECK-RV64-NEXT:    li a3, 185
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_709
+; CHECK-RV64-NEXT:    j .LBB61_191
+; CHECK-RV64-NEXT:  .LBB61_709: # %cond.load741
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 187
+; CHECK-RV64-NEXT:    li a3, 186
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_710
+; CHECK-RV64-NEXT:    j .LBB61_192
+; CHECK-RV64-NEXT:  .LBB61_710: # %cond.load745
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 188
+; CHECK-RV64-NEXT:    li a3, 187
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_711
+; CHECK-RV64-NEXT:    j .LBB61_193
+; CHECK-RV64-NEXT:  .LBB61_711: # %cond.load749
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 189
+; CHECK-RV64-NEXT:    li a3, 188
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1027
+; CHECK-RV64-NEXT:    j .LBB61_194
+; CHECK-RV64-NEXT:  .LBB61_1027: # %cond.load749
+; CHECK-RV64-NEXT:    j .LBB61_195
+; CHECK-RV64-NEXT:  .LBB61_712: # %cond.load761
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 192
+; CHECK-RV64-NEXT:    li a3, 191
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_713
+; CHECK-RV64-NEXT:    j .LBB61_199
+; CHECK-RV64-NEXT:  .LBB61_713: # %cond.load765
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 193
+; CHECK-RV64-NEXT:    li a3, 192
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_714
+; CHECK-RV64-NEXT:    j .LBB61_200
+; CHECK-RV64-NEXT:  .LBB61_714: # %cond.load769
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 194
+; CHECK-RV64-NEXT:    li a3, 193
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_715
+; CHECK-RV64-NEXT:    j .LBB61_201
+; CHECK-RV64-NEXT:  .LBB61_715: # %cond.load773
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 195
+; CHECK-RV64-NEXT:    li a3, 194
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_716
+; CHECK-RV64-NEXT:    j .LBB61_202
+; CHECK-RV64-NEXT:  .LBB61_716: # %cond.load777
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 196
+; CHECK-RV64-NEXT:    li a3, 195
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_717
+; CHECK-RV64-NEXT:    j .LBB61_203
+; CHECK-RV64-NEXT:  .LBB61_717: # %cond.load781
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 197
+; CHECK-RV64-NEXT:    li a3, 196
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_718
+; CHECK-RV64-NEXT:    j .LBB61_204
+; CHECK-RV64-NEXT:  .LBB61_718: # %cond.load785
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 198
+; CHECK-RV64-NEXT:    li a3, 197
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_719
+; CHECK-RV64-NEXT:    j .LBB61_205
+; CHECK-RV64-NEXT:  .LBB61_719: # %cond.load789
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 199
+; CHECK-RV64-NEXT:    li a3, 198
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_720
+; CHECK-RV64-NEXT:    j .LBB61_206
+; CHECK-RV64-NEXT:  .LBB61_720: # %cond.load793
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 200
+; CHECK-RV64-NEXT:    li a3, 199
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_721
+; CHECK-RV64-NEXT:    j .LBB61_207
+; CHECK-RV64-NEXT:  .LBB61_721: # %cond.load797
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 201
+; CHECK-RV64-NEXT:    li a3, 200
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_722
+; CHECK-RV64-NEXT:    j .LBB61_208
+; CHECK-RV64-NEXT:  .LBB61_722: # %cond.load801
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 202
+; CHECK-RV64-NEXT:    li a3, 201
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_723
+; CHECK-RV64-NEXT:    j .LBB61_209
+; CHECK-RV64-NEXT:  .LBB61_723: # %cond.load805
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 203
+; CHECK-RV64-NEXT:    li a3, 202
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_724
+; CHECK-RV64-NEXT:    j .LBB61_210
+; CHECK-RV64-NEXT:  .LBB61_724: # %cond.load809
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 204
+; CHECK-RV64-NEXT:    li a3, 203
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_725
+; CHECK-RV64-NEXT:    j .LBB61_211
+; CHECK-RV64-NEXT:  .LBB61_725: # %cond.load813
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 205
+; CHECK-RV64-NEXT:    li a3, 204
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_726
+; CHECK-RV64-NEXT:    j .LBB61_212
+; CHECK-RV64-NEXT:  .LBB61_726: # %cond.load817
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 206
+; CHECK-RV64-NEXT:    li a3, 205
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_727
+; CHECK-RV64-NEXT:    j .LBB61_213
+; CHECK-RV64-NEXT:  .LBB61_727: # %cond.load821
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 207
+; CHECK-RV64-NEXT:    li a3, 206
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_728
+; CHECK-RV64-NEXT:    j .LBB61_214
+; CHECK-RV64-NEXT:  .LBB61_728: # %cond.load825
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 208
+; CHECK-RV64-NEXT:    li a3, 207
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_729
+; CHECK-RV64-NEXT:    j .LBB61_215
+; CHECK-RV64-NEXT:  .LBB61_729: # %cond.load829
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 209
+; CHECK-RV64-NEXT:    li a3, 208
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_730
+; CHECK-RV64-NEXT:    j .LBB61_216
+; CHECK-RV64-NEXT:  .LBB61_730: # %cond.load833
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 210
+; CHECK-RV64-NEXT:    li a3, 209
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_731
+; CHECK-RV64-NEXT:    j .LBB61_217
+; CHECK-RV64-NEXT:  .LBB61_731: # %cond.load837
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 211
+; CHECK-RV64-NEXT:    li a3, 210
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_732
+; CHECK-RV64-NEXT:    j .LBB61_218
+; CHECK-RV64-NEXT:  .LBB61_732: # %cond.load841
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 212
+; CHECK-RV64-NEXT:    li a3, 211
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_733
+; CHECK-RV64-NEXT:    j .LBB61_219
+; CHECK-RV64-NEXT:  .LBB61_733: # %cond.load845
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 213
+; CHECK-RV64-NEXT:    li a3, 212
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_734
+; CHECK-RV64-NEXT:    j .LBB61_220
+; CHECK-RV64-NEXT:  .LBB61_734: # %cond.load849
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 214
+; CHECK-RV64-NEXT:    li a3, 213
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_735
+; CHECK-RV64-NEXT:    j .LBB61_221
+; CHECK-RV64-NEXT:  .LBB61_735: # %cond.load853
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 215
+; CHECK-RV64-NEXT:    li a3, 214
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_736
+; CHECK-RV64-NEXT:    j .LBB61_222
+; CHECK-RV64-NEXT:  .LBB61_736: # %cond.load857
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 216
+; CHECK-RV64-NEXT:    li a3, 215
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_737
+; CHECK-RV64-NEXT:    j .LBB61_223
+; CHECK-RV64-NEXT:  .LBB61_737: # %cond.load861
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 217
+; CHECK-RV64-NEXT:    li a3, 216
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_738
+; CHECK-RV64-NEXT:    j .LBB61_224
+; CHECK-RV64-NEXT:  .LBB61_738: # %cond.load865
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 218
+; CHECK-RV64-NEXT:    li a3, 217
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_739
+; CHECK-RV64-NEXT:    j .LBB61_225
+; CHECK-RV64-NEXT:  .LBB61_739: # %cond.load869
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 219
+; CHECK-RV64-NEXT:    li a3, 218
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_740
+; CHECK-RV64-NEXT:    j .LBB61_226
+; CHECK-RV64-NEXT:  .LBB61_740: # %cond.load873
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 220
+; CHECK-RV64-NEXT:    li a3, 219
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_741
+; CHECK-RV64-NEXT:    j .LBB61_227
+; CHECK-RV64-NEXT:  .LBB61_741: # %cond.load877
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 221
+; CHECK-RV64-NEXT:    li a3, 220
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_742
+; CHECK-RV64-NEXT:    j .LBB61_228
+; CHECK-RV64-NEXT:  .LBB61_742: # %cond.load881
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 222
+; CHECK-RV64-NEXT:    li a3, 221
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_743
+; CHECK-RV64-NEXT:    j .LBB61_229
+; CHECK-RV64-NEXT:  .LBB61_743: # %cond.load885
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 223
+; CHECK-RV64-NEXT:    li a3, 222
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_744
+; CHECK-RV64-NEXT:    j .LBB61_230
+; CHECK-RV64-NEXT:  .LBB61_744: # %cond.load889
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 224
+; CHECK-RV64-NEXT:    li a3, 223
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_745
+; CHECK-RV64-NEXT:    j .LBB61_231
+; CHECK-RV64-NEXT:  .LBB61_745: # %cond.load893
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 225
+; CHECK-RV64-NEXT:    li a3, 224
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_746
+; CHECK-RV64-NEXT:    j .LBB61_232
+; CHECK-RV64-NEXT:  .LBB61_746: # %cond.load897
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 226
+; CHECK-RV64-NEXT:    li a3, 225
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_747
+; CHECK-RV64-NEXT:    j .LBB61_233
+; CHECK-RV64-NEXT:  .LBB61_747: # %cond.load901
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 227
+; CHECK-RV64-NEXT:    li a3, 226
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_748
+; CHECK-RV64-NEXT:    j .LBB61_234
+; CHECK-RV64-NEXT:  .LBB61_748: # %cond.load905
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 228
+; CHECK-RV64-NEXT:    li a3, 227
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_749
+; CHECK-RV64-NEXT:    j .LBB61_235
+; CHECK-RV64-NEXT:  .LBB61_749: # %cond.load909
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 229
+; CHECK-RV64-NEXT:    li a3, 228
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_750
+; CHECK-RV64-NEXT:    j .LBB61_236
+; CHECK-RV64-NEXT:  .LBB61_750: # %cond.load913
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 230
+; CHECK-RV64-NEXT:    li a3, 229
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_751
+; CHECK-RV64-NEXT:    j .LBB61_237
+; CHECK-RV64-NEXT:  .LBB61_751: # %cond.load917
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 231
+; CHECK-RV64-NEXT:    li a3, 230
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_752
+; CHECK-RV64-NEXT:    j .LBB61_238
+; CHECK-RV64-NEXT:  .LBB61_752: # %cond.load921
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 232
+; CHECK-RV64-NEXT:    li a3, 231
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_753
+; CHECK-RV64-NEXT:    j .LBB61_239
+; CHECK-RV64-NEXT:  .LBB61_753: # %cond.load925
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 233
+; CHECK-RV64-NEXT:    li a3, 232
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_754
+; CHECK-RV64-NEXT:    j .LBB61_240
+; CHECK-RV64-NEXT:  .LBB61_754: # %cond.load929
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 234
+; CHECK-RV64-NEXT:    li a3, 233
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_755
+; CHECK-RV64-NEXT:    j .LBB61_241
+; CHECK-RV64-NEXT:  .LBB61_755: # %cond.load933
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 235
+; CHECK-RV64-NEXT:    li a3, 234
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_756
+; CHECK-RV64-NEXT:    j .LBB61_242
+; CHECK-RV64-NEXT:  .LBB61_756: # %cond.load937
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 236
+; CHECK-RV64-NEXT:    li a3, 235
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_757
+; CHECK-RV64-NEXT:    j .LBB61_243
+; CHECK-RV64-NEXT:  .LBB61_757: # %cond.load941
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 237
+; CHECK-RV64-NEXT:    li a3, 236
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_758
+; CHECK-RV64-NEXT:    j .LBB61_244
+; CHECK-RV64-NEXT:  .LBB61_758: # %cond.load945
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 238
+; CHECK-RV64-NEXT:    li a3, 237
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_759
+; CHECK-RV64-NEXT:    j .LBB61_245
+; CHECK-RV64-NEXT:  .LBB61_759: # %cond.load949
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 239
+; CHECK-RV64-NEXT:    li a3, 238
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_760
+; CHECK-RV64-NEXT:    j .LBB61_246
+; CHECK-RV64-NEXT:  .LBB61_760: # %cond.load953
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 240
+; CHECK-RV64-NEXT:    li a3, 239
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_761
+; CHECK-RV64-NEXT:    j .LBB61_247
+; CHECK-RV64-NEXT:  .LBB61_761: # %cond.load957
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 241
+; CHECK-RV64-NEXT:    li a3, 240
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_762
+; CHECK-RV64-NEXT:    j .LBB61_248
+; CHECK-RV64-NEXT:  .LBB61_762: # %cond.load961
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 242
+; CHECK-RV64-NEXT:    li a3, 241
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_763
+; CHECK-RV64-NEXT:    j .LBB61_249
+; CHECK-RV64-NEXT:  .LBB61_763: # %cond.load965
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 243
+; CHECK-RV64-NEXT:    li a3, 242
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_764
+; CHECK-RV64-NEXT:    j .LBB61_250
+; CHECK-RV64-NEXT:  .LBB61_764: # %cond.load969
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 244
+; CHECK-RV64-NEXT:    li a3, 243
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_765
+; CHECK-RV64-NEXT:    j .LBB61_251
+; CHECK-RV64-NEXT:  .LBB61_765: # %cond.load973
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 245
+; CHECK-RV64-NEXT:    li a3, 244
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_766
+; CHECK-RV64-NEXT:    j .LBB61_252
+; CHECK-RV64-NEXT:  .LBB61_766: # %cond.load977
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 246
+; CHECK-RV64-NEXT:    li a3, 245
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_767
+; CHECK-RV64-NEXT:    j .LBB61_253
+; CHECK-RV64-NEXT:  .LBB61_767: # %cond.load981
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 247
+; CHECK-RV64-NEXT:    li a3, 246
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_768
+; CHECK-RV64-NEXT:    j .LBB61_254
+; CHECK-RV64-NEXT:  .LBB61_768: # %cond.load985
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 248
+; CHECK-RV64-NEXT:    li a3, 247
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_769
+; CHECK-RV64-NEXT:    j .LBB61_255
+; CHECK-RV64-NEXT:  .LBB61_769: # %cond.load989
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 249
+; CHECK-RV64-NEXT:    li a3, 248
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_770
+; CHECK-RV64-NEXT:    j .LBB61_256
+; CHECK-RV64-NEXT:  .LBB61_770: # %cond.load993
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 250
+; CHECK-RV64-NEXT:    li a3, 249
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_771
+; CHECK-RV64-NEXT:    j .LBB61_257
+; CHECK-RV64-NEXT:  .LBB61_771: # %cond.load997
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 251
+; CHECK-RV64-NEXT:    li a3, 250
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_772
+; CHECK-RV64-NEXT:    j .LBB61_258
+; CHECK-RV64-NEXT:  .LBB61_772: # %cond.load1001
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 252
+; CHECK-RV64-NEXT:    li a3, 251
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_773
+; CHECK-RV64-NEXT:    j .LBB61_259
+; CHECK-RV64-NEXT:  .LBB61_773: # %cond.load1005
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a2, 253
+; CHECK-RV64-NEXT:    li a3, 252
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1028
+; CHECK-RV64-NEXT:    j .LBB61_260
+; CHECK-RV64-NEXT:  .LBB61_1028: # %cond.load1005
+; CHECK-RV64-NEXT:    j .LBB61_261
+; CHECK-RV64-NEXT:  .LBB61_774: # %cond.load1017
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    vmv8r.v v24, v8
+; CHECK-RV64-NEXT:    li a1, 256
+; CHECK-RV64-NEXT:    li a3, 255
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m4, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    vmv4r.v v24, v8
+; CHECK-RV64-NEXT:    vmv8r.v v8, v24
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_775
+; CHECK-RV64-NEXT:    j .LBB61_265
+; CHECK-RV64-NEXT:  .LBB61_775: # %cond.load1021
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 257
+; CHECK-RV64-NEXT:    li a3, 256
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_776
+; CHECK-RV64-NEXT:    j .LBB61_266
+; CHECK-RV64-NEXT:  .LBB61_776: # %cond.load1025
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 258
+; CHECK-RV64-NEXT:    li a3, 257
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_777
+; CHECK-RV64-NEXT:    j .LBB61_267
+; CHECK-RV64-NEXT:  .LBB61_777: # %cond.load1029
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 259
+; CHECK-RV64-NEXT:    li a3, 258
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_778
+; CHECK-RV64-NEXT:    j .LBB61_268
+; CHECK-RV64-NEXT:  .LBB61_778: # %cond.load1033
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 260
+; CHECK-RV64-NEXT:    li a3, 259
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_779
+; CHECK-RV64-NEXT:    j .LBB61_269
+; CHECK-RV64-NEXT:  .LBB61_779: # %cond.load1037
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 261
+; CHECK-RV64-NEXT:    li a3, 260
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_780
+; CHECK-RV64-NEXT:    j .LBB61_270
+; CHECK-RV64-NEXT:  .LBB61_780: # %cond.load1041
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 262
+; CHECK-RV64-NEXT:    li a3, 261
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_781
+; CHECK-RV64-NEXT:    j .LBB61_271
+; CHECK-RV64-NEXT:  .LBB61_781: # %cond.load1045
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 263
+; CHECK-RV64-NEXT:    li a3, 262
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_782
+; CHECK-RV64-NEXT:    j .LBB61_272
+; CHECK-RV64-NEXT:  .LBB61_782: # %cond.load1049
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 264
+; CHECK-RV64-NEXT:    li a3, 263
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_783
+; CHECK-RV64-NEXT:    j .LBB61_273
+; CHECK-RV64-NEXT:  .LBB61_783: # %cond.load1053
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 265
+; CHECK-RV64-NEXT:    li a3, 264
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_784
+; CHECK-RV64-NEXT:    j .LBB61_274
+; CHECK-RV64-NEXT:  .LBB61_784: # %cond.load1057
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 266
+; CHECK-RV64-NEXT:    li a3, 265
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_785
+; CHECK-RV64-NEXT:    j .LBB61_275
+; CHECK-RV64-NEXT:  .LBB61_785: # %cond.load1061
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 267
+; CHECK-RV64-NEXT:    li a3, 266
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_786
+; CHECK-RV64-NEXT:    j .LBB61_276
+; CHECK-RV64-NEXT:  .LBB61_786: # %cond.load1065
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 268
+; CHECK-RV64-NEXT:    li a3, 267
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_787
+; CHECK-RV64-NEXT:    j .LBB61_277
+; CHECK-RV64-NEXT:  .LBB61_787: # %cond.load1069
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 269
+; CHECK-RV64-NEXT:    li a3, 268
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_788
+; CHECK-RV64-NEXT:    j .LBB61_278
+; CHECK-RV64-NEXT:  .LBB61_788: # %cond.load1073
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 270
+; CHECK-RV64-NEXT:    li a3, 269
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_789
+; CHECK-RV64-NEXT:    j .LBB61_279
+; CHECK-RV64-NEXT:  .LBB61_789: # %cond.load1077
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 271
+; CHECK-RV64-NEXT:    li a3, 270
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_790
+; CHECK-RV64-NEXT:    j .LBB61_280
+; CHECK-RV64-NEXT:  .LBB61_790: # %cond.load1081
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 272
+; CHECK-RV64-NEXT:    li a3, 271
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_791
+; CHECK-RV64-NEXT:    j .LBB61_281
+; CHECK-RV64-NEXT:  .LBB61_791: # %cond.load1085
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 273
+; CHECK-RV64-NEXT:    li a3, 272
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_792
+; CHECK-RV64-NEXT:    j .LBB61_282
+; CHECK-RV64-NEXT:  .LBB61_792: # %cond.load1089
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 274
+; CHECK-RV64-NEXT:    li a3, 273
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_793
+; CHECK-RV64-NEXT:    j .LBB61_283
+; CHECK-RV64-NEXT:  .LBB61_793: # %cond.load1093
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 275
+; CHECK-RV64-NEXT:    li a3, 274
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_794
+; CHECK-RV64-NEXT:    j .LBB61_284
+; CHECK-RV64-NEXT:  .LBB61_794: # %cond.load1097
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 276
+; CHECK-RV64-NEXT:    li a3, 275
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_795
+; CHECK-RV64-NEXT:    j .LBB61_285
+; CHECK-RV64-NEXT:  .LBB61_795: # %cond.load1101
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 277
+; CHECK-RV64-NEXT:    li a3, 276
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_796
+; CHECK-RV64-NEXT:    j .LBB61_286
+; CHECK-RV64-NEXT:  .LBB61_796: # %cond.load1105
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 278
+; CHECK-RV64-NEXT:    li a3, 277
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_797
+; CHECK-RV64-NEXT:    j .LBB61_287
+; CHECK-RV64-NEXT:  .LBB61_797: # %cond.load1109
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 279
+; CHECK-RV64-NEXT:    li a3, 278
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_798
+; CHECK-RV64-NEXT:    j .LBB61_288
+; CHECK-RV64-NEXT:  .LBB61_798: # %cond.load1113
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 280
+; CHECK-RV64-NEXT:    li a3, 279
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_799
+; CHECK-RV64-NEXT:    j .LBB61_289
+; CHECK-RV64-NEXT:  .LBB61_799: # %cond.load1117
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 281
+; CHECK-RV64-NEXT:    li a3, 280
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_800
+; CHECK-RV64-NEXT:    j .LBB61_290
+; CHECK-RV64-NEXT:  .LBB61_800: # %cond.load1121
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 282
+; CHECK-RV64-NEXT:    li a3, 281
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_801
+; CHECK-RV64-NEXT:    j .LBB61_291
+; CHECK-RV64-NEXT:  .LBB61_801: # %cond.load1125
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 283
+; CHECK-RV64-NEXT:    li a3, 282
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_802
+; CHECK-RV64-NEXT:    j .LBB61_292
+; CHECK-RV64-NEXT:  .LBB61_802: # %cond.load1129
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 284
+; CHECK-RV64-NEXT:    li a3, 283
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_803
+; CHECK-RV64-NEXT:    j .LBB61_293
+; CHECK-RV64-NEXT:  .LBB61_803: # %cond.load1133
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 285
+; CHECK-RV64-NEXT:    li a3, 284
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_804
+; CHECK-RV64-NEXT:    j .LBB61_294
+; CHECK-RV64-NEXT:  .LBB61_804: # %cond.load1137
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 286
+; CHECK-RV64-NEXT:    li a3, 285
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_805
+; CHECK-RV64-NEXT:    j .LBB61_295
+; CHECK-RV64-NEXT:  .LBB61_805: # %cond.load1141
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 287
+; CHECK-RV64-NEXT:    li a3, 286
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_806
+; CHECK-RV64-NEXT:    j .LBB61_296
+; CHECK-RV64-NEXT:  .LBB61_806: # %cond.load1145
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 288
+; CHECK-RV64-NEXT:    li a3, 287
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_807
+; CHECK-RV64-NEXT:    j .LBB61_297
+; CHECK-RV64-NEXT:  .LBB61_807: # %cond.load1149
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 289
+; CHECK-RV64-NEXT:    li a3, 288
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_808
+; CHECK-RV64-NEXT:    j .LBB61_298
+; CHECK-RV64-NEXT:  .LBB61_808: # %cond.load1153
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 290
+; CHECK-RV64-NEXT:    li a3, 289
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_809
+; CHECK-RV64-NEXT:    j .LBB61_299
+; CHECK-RV64-NEXT:  .LBB61_809: # %cond.load1157
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 291
+; CHECK-RV64-NEXT:    li a3, 290
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_810
+; CHECK-RV64-NEXT:    j .LBB61_300
+; CHECK-RV64-NEXT:  .LBB61_810: # %cond.load1161
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 292
+; CHECK-RV64-NEXT:    li a3, 291
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_811
+; CHECK-RV64-NEXT:    j .LBB61_301
+; CHECK-RV64-NEXT:  .LBB61_811: # %cond.load1165
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 293
+; CHECK-RV64-NEXT:    li a3, 292
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_812
+; CHECK-RV64-NEXT:    j .LBB61_302
+; CHECK-RV64-NEXT:  .LBB61_812: # %cond.load1169
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 294
+; CHECK-RV64-NEXT:    li a3, 293
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_813
+; CHECK-RV64-NEXT:    j .LBB61_303
+; CHECK-RV64-NEXT:  .LBB61_813: # %cond.load1173
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 295
+; CHECK-RV64-NEXT:    li a3, 294
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_814
+; CHECK-RV64-NEXT:    j .LBB61_304
+; CHECK-RV64-NEXT:  .LBB61_814: # %cond.load1177
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 296
+; CHECK-RV64-NEXT:    li a3, 295
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_815
+; CHECK-RV64-NEXT:    j .LBB61_305
+; CHECK-RV64-NEXT:  .LBB61_815: # %cond.load1181
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 297
+; CHECK-RV64-NEXT:    li a3, 296
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_816
+; CHECK-RV64-NEXT:    j .LBB61_306
+; CHECK-RV64-NEXT:  .LBB61_816: # %cond.load1185
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 298
+; CHECK-RV64-NEXT:    li a3, 297
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_817
+; CHECK-RV64-NEXT:    j .LBB61_307
+; CHECK-RV64-NEXT:  .LBB61_817: # %cond.load1189
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 299
+; CHECK-RV64-NEXT:    li a3, 298
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_818
+; CHECK-RV64-NEXT:    j .LBB61_308
+; CHECK-RV64-NEXT:  .LBB61_818: # %cond.load1193
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 300
+; CHECK-RV64-NEXT:    li a3, 299
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_819
+; CHECK-RV64-NEXT:    j .LBB61_309
+; CHECK-RV64-NEXT:  .LBB61_819: # %cond.load1197
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 301
+; CHECK-RV64-NEXT:    li a3, 300
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_820
+; CHECK-RV64-NEXT:    j .LBB61_310
+; CHECK-RV64-NEXT:  .LBB61_820: # %cond.load1201
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 302
+; CHECK-RV64-NEXT:    li a3, 301
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_821
+; CHECK-RV64-NEXT:    j .LBB61_311
+; CHECK-RV64-NEXT:  .LBB61_821: # %cond.load1205
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 303
+; CHECK-RV64-NEXT:    li a3, 302
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_822
+; CHECK-RV64-NEXT:    j .LBB61_312
+; CHECK-RV64-NEXT:  .LBB61_822: # %cond.load1209
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 304
+; CHECK-RV64-NEXT:    li a3, 303
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_823
+; CHECK-RV64-NEXT:    j .LBB61_313
+; CHECK-RV64-NEXT:  .LBB61_823: # %cond.load1213
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 305
+; CHECK-RV64-NEXT:    li a3, 304
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_824
+; CHECK-RV64-NEXT:    j .LBB61_314
+; CHECK-RV64-NEXT:  .LBB61_824: # %cond.load1217
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 306
+; CHECK-RV64-NEXT:    li a3, 305
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_825
+; CHECK-RV64-NEXT:    j .LBB61_315
+; CHECK-RV64-NEXT:  .LBB61_825: # %cond.load1221
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 307
+; CHECK-RV64-NEXT:    li a3, 306
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_826
+; CHECK-RV64-NEXT:    j .LBB61_316
+; CHECK-RV64-NEXT:  .LBB61_826: # %cond.load1225
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 308
+; CHECK-RV64-NEXT:    li a3, 307
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_827
+; CHECK-RV64-NEXT:    j .LBB61_317
+; CHECK-RV64-NEXT:  .LBB61_827: # %cond.load1229
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 309
+; CHECK-RV64-NEXT:    li a3, 308
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_828
+; CHECK-RV64-NEXT:    j .LBB61_318
+; CHECK-RV64-NEXT:  .LBB61_828: # %cond.load1233
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 310
+; CHECK-RV64-NEXT:    li a3, 309
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_829
+; CHECK-RV64-NEXT:    j .LBB61_319
+; CHECK-RV64-NEXT:  .LBB61_829: # %cond.load1237
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 311
+; CHECK-RV64-NEXT:    li a3, 310
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_830
+; CHECK-RV64-NEXT:    j .LBB61_320
+; CHECK-RV64-NEXT:  .LBB61_830: # %cond.load1241
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 312
+; CHECK-RV64-NEXT:    li a3, 311
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_831
+; CHECK-RV64-NEXT:    j .LBB61_321
+; CHECK-RV64-NEXT:  .LBB61_831: # %cond.load1245
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 313
+; CHECK-RV64-NEXT:    li a3, 312
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_832
+; CHECK-RV64-NEXT:    j .LBB61_322
+; CHECK-RV64-NEXT:  .LBB61_832: # %cond.load1249
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 314
+; CHECK-RV64-NEXT:    li a3, 313
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_833
+; CHECK-RV64-NEXT:    j .LBB61_323
+; CHECK-RV64-NEXT:  .LBB61_833: # %cond.load1253
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 315
+; CHECK-RV64-NEXT:    li a3, 314
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_834
+; CHECK-RV64-NEXT:    j .LBB61_324
+; CHECK-RV64-NEXT:  .LBB61_834: # %cond.load1257
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 316
+; CHECK-RV64-NEXT:    li a3, 315
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_835
+; CHECK-RV64-NEXT:    j .LBB61_325
+; CHECK-RV64-NEXT:  .LBB61_835: # %cond.load1261
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 317
+; CHECK-RV64-NEXT:    li a3, 316
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1029
+; CHECK-RV64-NEXT:    j .LBB61_326
+; CHECK-RV64-NEXT:  .LBB61_1029: # %cond.load1261
+; CHECK-RV64-NEXT:    j .LBB61_327
+; CHECK-RV64-NEXT:  .LBB61_836: # %cond.load1273
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 320
+; CHECK-RV64-NEXT:    li a3, 319
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_837
+; CHECK-RV64-NEXT:    j .LBB61_331
+; CHECK-RV64-NEXT:  .LBB61_837: # %cond.load1277
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 321
+; CHECK-RV64-NEXT:    li a3, 320
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_838
+; CHECK-RV64-NEXT:    j .LBB61_332
+; CHECK-RV64-NEXT:  .LBB61_838: # %cond.load1281
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 322
+; CHECK-RV64-NEXT:    li a3, 321
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_839
+; CHECK-RV64-NEXT:    j .LBB61_333
+; CHECK-RV64-NEXT:  .LBB61_839: # %cond.load1285
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 323
+; CHECK-RV64-NEXT:    li a3, 322
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_840
+; CHECK-RV64-NEXT:    j .LBB61_334
+; CHECK-RV64-NEXT:  .LBB61_840: # %cond.load1289
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 324
+; CHECK-RV64-NEXT:    li a3, 323
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_841
+; CHECK-RV64-NEXT:    j .LBB61_335
+; CHECK-RV64-NEXT:  .LBB61_841: # %cond.load1293
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 325
+; CHECK-RV64-NEXT:    li a3, 324
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_842
+; CHECK-RV64-NEXT:    j .LBB61_336
+; CHECK-RV64-NEXT:  .LBB61_842: # %cond.load1297
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 326
+; CHECK-RV64-NEXT:    li a3, 325
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_843
+; CHECK-RV64-NEXT:    j .LBB61_337
+; CHECK-RV64-NEXT:  .LBB61_843: # %cond.load1301
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 327
+; CHECK-RV64-NEXT:    li a3, 326
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_844
+; CHECK-RV64-NEXT:    j .LBB61_338
+; CHECK-RV64-NEXT:  .LBB61_844: # %cond.load1305
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 328
+; CHECK-RV64-NEXT:    li a3, 327
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_845
+; CHECK-RV64-NEXT:    j .LBB61_339
+; CHECK-RV64-NEXT:  .LBB61_845: # %cond.load1309
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 329
+; CHECK-RV64-NEXT:    li a3, 328
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_846
+; CHECK-RV64-NEXT:    j .LBB61_340
+; CHECK-RV64-NEXT:  .LBB61_846: # %cond.load1313
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 330
+; CHECK-RV64-NEXT:    li a3, 329
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_847
+; CHECK-RV64-NEXT:    j .LBB61_341
+; CHECK-RV64-NEXT:  .LBB61_847: # %cond.load1317
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 331
+; CHECK-RV64-NEXT:    li a3, 330
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_848
+; CHECK-RV64-NEXT:    j .LBB61_342
+; CHECK-RV64-NEXT:  .LBB61_848: # %cond.load1321
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 332
+; CHECK-RV64-NEXT:    li a3, 331
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_849
+; CHECK-RV64-NEXT:    j .LBB61_343
+; CHECK-RV64-NEXT:  .LBB61_849: # %cond.load1325
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 333
+; CHECK-RV64-NEXT:    li a3, 332
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_850
+; CHECK-RV64-NEXT:    j .LBB61_344
+; CHECK-RV64-NEXT:  .LBB61_850: # %cond.load1329
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 334
+; CHECK-RV64-NEXT:    li a3, 333
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_851
+; CHECK-RV64-NEXT:    j .LBB61_345
+; CHECK-RV64-NEXT:  .LBB61_851: # %cond.load1333
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 335
+; CHECK-RV64-NEXT:    li a3, 334
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_852
+; CHECK-RV64-NEXT:    j .LBB61_346
+; CHECK-RV64-NEXT:  .LBB61_852: # %cond.load1337
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 336
+; CHECK-RV64-NEXT:    li a3, 335
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_853
+; CHECK-RV64-NEXT:    j .LBB61_347
+; CHECK-RV64-NEXT:  .LBB61_853: # %cond.load1341
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 337
+; CHECK-RV64-NEXT:    li a3, 336
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_854
+; CHECK-RV64-NEXT:    j .LBB61_348
+; CHECK-RV64-NEXT:  .LBB61_854: # %cond.load1345
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 338
+; CHECK-RV64-NEXT:    li a3, 337
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_855
+; CHECK-RV64-NEXT:    j .LBB61_349
+; CHECK-RV64-NEXT:  .LBB61_855: # %cond.load1349
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 339
+; CHECK-RV64-NEXT:    li a3, 338
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_856
+; CHECK-RV64-NEXT:    j .LBB61_350
+; CHECK-RV64-NEXT:  .LBB61_856: # %cond.load1353
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 340
+; CHECK-RV64-NEXT:    li a3, 339
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_857
+; CHECK-RV64-NEXT:    j .LBB61_351
+; CHECK-RV64-NEXT:  .LBB61_857: # %cond.load1357
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 341
+; CHECK-RV64-NEXT:    li a3, 340
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_858
+; CHECK-RV64-NEXT:    j .LBB61_352
+; CHECK-RV64-NEXT:  .LBB61_858: # %cond.load1361
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 342
+; CHECK-RV64-NEXT:    li a3, 341
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_859
+; CHECK-RV64-NEXT:    j .LBB61_353
+; CHECK-RV64-NEXT:  .LBB61_859: # %cond.load1365
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 343
+; CHECK-RV64-NEXT:    li a3, 342
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_860
+; CHECK-RV64-NEXT:    j .LBB61_354
+; CHECK-RV64-NEXT:  .LBB61_860: # %cond.load1369
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 344
+; CHECK-RV64-NEXT:    li a3, 343
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_861
+; CHECK-RV64-NEXT:    j .LBB61_355
+; CHECK-RV64-NEXT:  .LBB61_861: # %cond.load1373
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 345
+; CHECK-RV64-NEXT:    li a3, 344
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_862
+; CHECK-RV64-NEXT:    j .LBB61_356
+; CHECK-RV64-NEXT:  .LBB61_862: # %cond.load1377
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 346
+; CHECK-RV64-NEXT:    li a3, 345
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_863
+; CHECK-RV64-NEXT:    j .LBB61_357
+; CHECK-RV64-NEXT:  .LBB61_863: # %cond.load1381
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 347
+; CHECK-RV64-NEXT:    li a3, 346
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_864
+; CHECK-RV64-NEXT:    j .LBB61_358
+; CHECK-RV64-NEXT:  .LBB61_864: # %cond.load1385
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 348
+; CHECK-RV64-NEXT:    li a3, 347
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_865
+; CHECK-RV64-NEXT:    j .LBB61_359
+; CHECK-RV64-NEXT:  .LBB61_865: # %cond.load1389
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 349
+; CHECK-RV64-NEXT:    li a3, 348
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_866
+; CHECK-RV64-NEXT:    j .LBB61_360
+; CHECK-RV64-NEXT:  .LBB61_866: # %cond.load1393
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 350
+; CHECK-RV64-NEXT:    li a3, 349
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_867
+; CHECK-RV64-NEXT:    j .LBB61_361
+; CHECK-RV64-NEXT:  .LBB61_867: # %cond.load1397
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 351
+; CHECK-RV64-NEXT:    li a3, 350
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_868
+; CHECK-RV64-NEXT:    j .LBB61_362
+; CHECK-RV64-NEXT:  .LBB61_868: # %cond.load1401
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 352
+; CHECK-RV64-NEXT:    li a3, 351
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_869
+; CHECK-RV64-NEXT:    j .LBB61_363
+; CHECK-RV64-NEXT:  .LBB61_869: # %cond.load1405
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 353
+; CHECK-RV64-NEXT:    li a3, 352
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_870
+; CHECK-RV64-NEXT:    j .LBB61_364
+; CHECK-RV64-NEXT:  .LBB61_870: # %cond.load1409
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 354
+; CHECK-RV64-NEXT:    li a3, 353
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_871
+; CHECK-RV64-NEXT:    j .LBB61_365
+; CHECK-RV64-NEXT:  .LBB61_871: # %cond.load1413
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 355
+; CHECK-RV64-NEXT:    li a3, 354
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_872
+; CHECK-RV64-NEXT:    j .LBB61_366
+; CHECK-RV64-NEXT:  .LBB61_872: # %cond.load1417
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 356
+; CHECK-RV64-NEXT:    li a3, 355
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_873
+; CHECK-RV64-NEXT:    j .LBB61_367
+; CHECK-RV64-NEXT:  .LBB61_873: # %cond.load1421
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 357
+; CHECK-RV64-NEXT:    li a3, 356
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_874
+; CHECK-RV64-NEXT:    j .LBB61_368
+; CHECK-RV64-NEXT:  .LBB61_874: # %cond.load1425
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 358
+; CHECK-RV64-NEXT:    li a3, 357
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_875
+; CHECK-RV64-NEXT:    j .LBB61_369
+; CHECK-RV64-NEXT:  .LBB61_875: # %cond.load1429
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 359
+; CHECK-RV64-NEXT:    li a3, 358
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_876
+; CHECK-RV64-NEXT:    j .LBB61_370
+; CHECK-RV64-NEXT:  .LBB61_876: # %cond.load1433
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 360
+; CHECK-RV64-NEXT:    li a3, 359
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_877
+; CHECK-RV64-NEXT:    j .LBB61_371
+; CHECK-RV64-NEXT:  .LBB61_877: # %cond.load1437
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 361
+; CHECK-RV64-NEXT:    li a3, 360
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_878
+; CHECK-RV64-NEXT:    j .LBB61_372
+; CHECK-RV64-NEXT:  .LBB61_878: # %cond.load1441
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 362
+; CHECK-RV64-NEXT:    li a3, 361
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_879
+; CHECK-RV64-NEXT:    j .LBB61_373
+; CHECK-RV64-NEXT:  .LBB61_879: # %cond.load1445
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 363
+; CHECK-RV64-NEXT:    li a3, 362
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_880
+; CHECK-RV64-NEXT:    j .LBB61_374
+; CHECK-RV64-NEXT:  .LBB61_880: # %cond.load1449
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 364
+; CHECK-RV64-NEXT:    li a3, 363
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_881
+; CHECK-RV64-NEXT:    j .LBB61_375
+; CHECK-RV64-NEXT:  .LBB61_881: # %cond.load1453
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 365
+; CHECK-RV64-NEXT:    li a3, 364
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_882
+; CHECK-RV64-NEXT:    j .LBB61_376
+; CHECK-RV64-NEXT:  .LBB61_882: # %cond.load1457
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 366
+; CHECK-RV64-NEXT:    li a3, 365
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_883
+; CHECK-RV64-NEXT:    j .LBB61_377
+; CHECK-RV64-NEXT:  .LBB61_883: # %cond.load1461
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 367
+; CHECK-RV64-NEXT:    li a3, 366
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_884
+; CHECK-RV64-NEXT:    j .LBB61_378
+; CHECK-RV64-NEXT:  .LBB61_884: # %cond.load1465
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 368
+; CHECK-RV64-NEXT:    li a3, 367
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_885
+; CHECK-RV64-NEXT:    j .LBB61_379
+; CHECK-RV64-NEXT:  .LBB61_885: # %cond.load1469
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 369
+; CHECK-RV64-NEXT:    li a3, 368
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_886
+; CHECK-RV64-NEXT:    j .LBB61_380
+; CHECK-RV64-NEXT:  .LBB61_886: # %cond.load1473
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 370
+; CHECK-RV64-NEXT:    li a3, 369
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_887
+; CHECK-RV64-NEXT:    j .LBB61_381
+; CHECK-RV64-NEXT:  .LBB61_887: # %cond.load1477
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 371
+; CHECK-RV64-NEXT:    li a3, 370
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_888
+; CHECK-RV64-NEXT:    j .LBB61_382
+; CHECK-RV64-NEXT:  .LBB61_888: # %cond.load1481
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 372
+; CHECK-RV64-NEXT:    li a3, 371
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_889
+; CHECK-RV64-NEXT:    j .LBB61_383
+; CHECK-RV64-NEXT:  .LBB61_889: # %cond.load1485
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 373
+; CHECK-RV64-NEXT:    li a3, 372
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_890
+; CHECK-RV64-NEXT:    j .LBB61_384
+; CHECK-RV64-NEXT:  .LBB61_890: # %cond.load1489
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 374
+; CHECK-RV64-NEXT:    li a3, 373
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_891
+; CHECK-RV64-NEXT:    j .LBB61_385
+; CHECK-RV64-NEXT:  .LBB61_891: # %cond.load1493
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 375
+; CHECK-RV64-NEXT:    li a3, 374
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_892
+; CHECK-RV64-NEXT:    j .LBB61_386
+; CHECK-RV64-NEXT:  .LBB61_892: # %cond.load1497
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 376
+; CHECK-RV64-NEXT:    li a3, 375
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_893
+; CHECK-RV64-NEXT:    j .LBB61_387
+; CHECK-RV64-NEXT:  .LBB61_893: # %cond.load1501
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 377
+; CHECK-RV64-NEXT:    li a3, 376
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_894
+; CHECK-RV64-NEXT:    j .LBB61_388
+; CHECK-RV64-NEXT:  .LBB61_894: # %cond.load1505
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 378
+; CHECK-RV64-NEXT:    li a3, 377
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_895
+; CHECK-RV64-NEXT:    j .LBB61_389
+; CHECK-RV64-NEXT:  .LBB61_895: # %cond.load1509
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 379
+; CHECK-RV64-NEXT:    li a3, 378
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_896
+; CHECK-RV64-NEXT:    j .LBB61_390
+; CHECK-RV64-NEXT:  .LBB61_896: # %cond.load1513
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 380
+; CHECK-RV64-NEXT:    li a3, 379
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_897
+; CHECK-RV64-NEXT:    j .LBB61_391
+; CHECK-RV64-NEXT:  .LBB61_897: # %cond.load1517
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 381
+; CHECK-RV64-NEXT:    li a3, 380
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bgez a2, .LBB61_1030
+; CHECK-RV64-NEXT:    j .LBB61_392
+; CHECK-RV64-NEXT:  .LBB61_1030: # %cond.load1517
+; CHECK-RV64-NEXT:    j .LBB61_393
+; CHECK-RV64-NEXT:  .LBB61_898: # %cond.load1529
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 384
+; CHECK-RV64-NEXT:    li a3, 383
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_899
+; CHECK-RV64-NEXT:    j .LBB61_397
+; CHECK-RV64-NEXT:  .LBB61_899: # %cond.load1533
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 385
+; CHECK-RV64-NEXT:    li a3, 384
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 2
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_900
+; CHECK-RV64-NEXT:    j .LBB61_398
+; CHECK-RV64-NEXT:  .LBB61_900: # %cond.load1537
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 386
+; CHECK-RV64-NEXT:    li a3, 385
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 4
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_901
+; CHECK-RV64-NEXT:    j .LBB61_399
+; CHECK-RV64-NEXT:  .LBB61_901: # %cond.load1541
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 387
+; CHECK-RV64-NEXT:    li a3, 386
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 8
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_902
+; CHECK-RV64-NEXT:    j .LBB61_400
+; CHECK-RV64-NEXT:  .LBB61_902: # %cond.load1545
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 388
+; CHECK-RV64-NEXT:    li a3, 387
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 16
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_903
+; CHECK-RV64-NEXT:    j .LBB61_401
+; CHECK-RV64-NEXT:  .LBB61_903: # %cond.load1549
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 389
+; CHECK-RV64-NEXT:    li a3, 388
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 32
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_904
+; CHECK-RV64-NEXT:    j .LBB61_402
+; CHECK-RV64-NEXT:  .LBB61_904: # %cond.load1553
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 390
+; CHECK-RV64-NEXT:    li a3, 389
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 64
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_905
+; CHECK-RV64-NEXT:    j .LBB61_403
+; CHECK-RV64-NEXT:  .LBB61_905: # %cond.load1557
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 391
+; CHECK-RV64-NEXT:    li a3, 390
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 128
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_906
+; CHECK-RV64-NEXT:    j .LBB61_404
+; CHECK-RV64-NEXT:  .LBB61_906: # %cond.load1561
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 392
+; CHECK-RV64-NEXT:    li a3, 391
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 256
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_907
+; CHECK-RV64-NEXT:    j .LBB61_405
+; CHECK-RV64-NEXT:  .LBB61_907: # %cond.load1565
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 393
+; CHECK-RV64-NEXT:    li a3, 392
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 512
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_908
+; CHECK-RV64-NEXT:    j .LBB61_406
+; CHECK-RV64-NEXT:  .LBB61_908: # %cond.load1569
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 394
+; CHECK-RV64-NEXT:    li a3, 393
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a1, a2, 1024
+; CHECK-RV64-NEXT:    bnez a1, .LBB61_909
+; CHECK-RV64-NEXT:    j .LBB61_407
+; CHECK-RV64-NEXT:  .LBB61_909: # %cond.load1573
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 395
+; CHECK-RV64-NEXT:    li a3, 394
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 52
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_910
+; CHECK-RV64-NEXT:    j .LBB61_408
+; CHECK-RV64-NEXT:  .LBB61_910: # %cond.load1577
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 396
+; CHECK-RV64-NEXT:    li a3, 395
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 51
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_911
+; CHECK-RV64-NEXT:    j .LBB61_409
+; CHECK-RV64-NEXT:  .LBB61_911: # %cond.load1581
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 397
+; CHECK-RV64-NEXT:    li a3, 396
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 50
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_912
+; CHECK-RV64-NEXT:    j .LBB61_410
+; CHECK-RV64-NEXT:  .LBB61_912: # %cond.load1585
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 398
+; CHECK-RV64-NEXT:    li a3, 397
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 49
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_913
+; CHECK-RV64-NEXT:    j .LBB61_411
+; CHECK-RV64-NEXT:  .LBB61_913: # %cond.load1589
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 399
+; CHECK-RV64-NEXT:    li a3, 398
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 48
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_914
+; CHECK-RV64-NEXT:    j .LBB61_412
+; CHECK-RV64-NEXT:  .LBB61_914: # %cond.load1593
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 400
+; CHECK-RV64-NEXT:    li a3, 399
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 47
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_915
+; CHECK-RV64-NEXT:    j .LBB61_413
+; CHECK-RV64-NEXT:  .LBB61_915: # %cond.load1597
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 401
+; CHECK-RV64-NEXT:    li a3, 400
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 46
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_916
+; CHECK-RV64-NEXT:    j .LBB61_414
+; CHECK-RV64-NEXT:  .LBB61_916: # %cond.load1601
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 402
+; CHECK-RV64-NEXT:    li a3, 401
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 45
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_917
+; CHECK-RV64-NEXT:    j .LBB61_415
+; CHECK-RV64-NEXT:  .LBB61_917: # %cond.load1605
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 403
+; CHECK-RV64-NEXT:    li a3, 402
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 44
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_918
+; CHECK-RV64-NEXT:    j .LBB61_416
+; CHECK-RV64-NEXT:  .LBB61_918: # %cond.load1609
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 404
+; CHECK-RV64-NEXT:    li a3, 403
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 43
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_919
+; CHECK-RV64-NEXT:    j .LBB61_417
+; CHECK-RV64-NEXT:  .LBB61_919: # %cond.load1613
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 405
+; CHECK-RV64-NEXT:    li a3, 404
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 42
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_920
+; CHECK-RV64-NEXT:    j .LBB61_418
+; CHECK-RV64-NEXT:  .LBB61_920: # %cond.load1617
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 406
+; CHECK-RV64-NEXT:    li a3, 405
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 41
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_921
+; CHECK-RV64-NEXT:    j .LBB61_419
+; CHECK-RV64-NEXT:  .LBB61_921: # %cond.load1621
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 407
+; CHECK-RV64-NEXT:    li a3, 406
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 40
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_922
+; CHECK-RV64-NEXT:    j .LBB61_420
+; CHECK-RV64-NEXT:  .LBB61_922: # %cond.load1625
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 408
+; CHECK-RV64-NEXT:    li a3, 407
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 39
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_923
+; CHECK-RV64-NEXT:    j .LBB61_421
+; CHECK-RV64-NEXT:  .LBB61_923: # %cond.load1629
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 409
+; CHECK-RV64-NEXT:    li a3, 408
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 38
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_924
+; CHECK-RV64-NEXT:    j .LBB61_422
+; CHECK-RV64-NEXT:  .LBB61_924: # %cond.load1633
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 410
+; CHECK-RV64-NEXT:    li a3, 409
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 37
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_925
+; CHECK-RV64-NEXT:    j .LBB61_423
+; CHECK-RV64-NEXT:  .LBB61_925: # %cond.load1637
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 411
+; CHECK-RV64-NEXT:    li a3, 410
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 36
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_926
+; CHECK-RV64-NEXT:    j .LBB61_424
+; CHECK-RV64-NEXT:  .LBB61_926: # %cond.load1641
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 412
+; CHECK-RV64-NEXT:    li a3, 411
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 35
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_927
+; CHECK-RV64-NEXT:    j .LBB61_425
+; CHECK-RV64-NEXT:  .LBB61_927: # %cond.load1645
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 413
+; CHECK-RV64-NEXT:    li a3, 412
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 34
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_928
+; CHECK-RV64-NEXT:    j .LBB61_426
+; CHECK-RV64-NEXT:  .LBB61_928: # %cond.load1649
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 414
+; CHECK-RV64-NEXT:    li a3, 413
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 33
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_929
+; CHECK-RV64-NEXT:    j .LBB61_427
+; CHECK-RV64-NEXT:  .LBB61_929: # %cond.load1653
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 415
+; CHECK-RV64-NEXT:    li a3, 414
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 32
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_930
+; CHECK-RV64-NEXT:    j .LBB61_428
+; CHECK-RV64-NEXT:  .LBB61_930: # %cond.load1657
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 416
+; CHECK-RV64-NEXT:    li a3, 415
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 31
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_931
+; CHECK-RV64-NEXT:    j .LBB61_429
+; CHECK-RV64-NEXT:  .LBB61_931: # %cond.load1661
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 417
+; CHECK-RV64-NEXT:    li a3, 416
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 30
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_932
+; CHECK-RV64-NEXT:    j .LBB61_430
+; CHECK-RV64-NEXT:  .LBB61_932: # %cond.load1665
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 418
+; CHECK-RV64-NEXT:    li a3, 417
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 29
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_933
+; CHECK-RV64-NEXT:    j .LBB61_431
+; CHECK-RV64-NEXT:  .LBB61_933: # %cond.load1669
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 419
+; CHECK-RV64-NEXT:    li a3, 418
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 28
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_934
+; CHECK-RV64-NEXT:    j .LBB61_432
+; CHECK-RV64-NEXT:  .LBB61_934: # %cond.load1673
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 420
+; CHECK-RV64-NEXT:    li a3, 419
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 27
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_935
+; CHECK-RV64-NEXT:    j .LBB61_433
+; CHECK-RV64-NEXT:  .LBB61_935: # %cond.load1677
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 421
+; CHECK-RV64-NEXT:    li a3, 420
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 26
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_936
+; CHECK-RV64-NEXT:    j .LBB61_434
+; CHECK-RV64-NEXT:  .LBB61_936: # %cond.load1681
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 422
+; CHECK-RV64-NEXT:    li a3, 421
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 25
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_937
+; CHECK-RV64-NEXT:    j .LBB61_435
+; CHECK-RV64-NEXT:  .LBB61_937: # %cond.load1685
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 423
+; CHECK-RV64-NEXT:    li a3, 422
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 24
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_938
+; CHECK-RV64-NEXT:    j .LBB61_436
+; CHECK-RV64-NEXT:  .LBB61_938: # %cond.load1689
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 424
+; CHECK-RV64-NEXT:    li a3, 423
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 23
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_939
+; CHECK-RV64-NEXT:    j .LBB61_437
+; CHECK-RV64-NEXT:  .LBB61_939: # %cond.load1693
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 425
+; CHECK-RV64-NEXT:    li a3, 424
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 22
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_940
+; CHECK-RV64-NEXT:    j .LBB61_438
+; CHECK-RV64-NEXT:  .LBB61_940: # %cond.load1697
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 426
+; CHECK-RV64-NEXT:    li a3, 425
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 21
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_941
+; CHECK-RV64-NEXT:    j .LBB61_439
+; CHECK-RV64-NEXT:  .LBB61_941: # %cond.load1701
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 427
+; CHECK-RV64-NEXT:    li a3, 426
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 20
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_942
+; CHECK-RV64-NEXT:    j .LBB61_440
+; CHECK-RV64-NEXT:  .LBB61_942: # %cond.load1705
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 428
+; CHECK-RV64-NEXT:    li a3, 427
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 19
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_943
+; CHECK-RV64-NEXT:    j .LBB61_441
+; CHECK-RV64-NEXT:  .LBB61_943: # %cond.load1709
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 429
+; CHECK-RV64-NEXT:    li a3, 428
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 18
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_944
+; CHECK-RV64-NEXT:    j .LBB61_442
+; CHECK-RV64-NEXT:  .LBB61_944: # %cond.load1713
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 430
+; CHECK-RV64-NEXT:    li a3, 429
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 17
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_945
+; CHECK-RV64-NEXT:    j .LBB61_443
+; CHECK-RV64-NEXT:  .LBB61_945: # %cond.load1717
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 431
+; CHECK-RV64-NEXT:    li a3, 430
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 16
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_946
+; CHECK-RV64-NEXT:    j .LBB61_444
+; CHECK-RV64-NEXT:  .LBB61_946: # %cond.load1721
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 432
+; CHECK-RV64-NEXT:    li a3, 431
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 15
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_947
+; CHECK-RV64-NEXT:    j .LBB61_445
+; CHECK-RV64-NEXT:  .LBB61_947: # %cond.load1725
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 433
+; CHECK-RV64-NEXT:    li a3, 432
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 14
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_948
+; CHECK-RV64-NEXT:    j .LBB61_446
+; CHECK-RV64-NEXT:  .LBB61_948: # %cond.load1729
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 434
+; CHECK-RV64-NEXT:    li a3, 433
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 13
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_949
+; CHECK-RV64-NEXT:    j .LBB61_447
+; CHECK-RV64-NEXT:  .LBB61_949: # %cond.load1733
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 435
+; CHECK-RV64-NEXT:    li a3, 434
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 12
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_950
+; CHECK-RV64-NEXT:    j .LBB61_448
+; CHECK-RV64-NEXT:  .LBB61_950: # %cond.load1737
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 436
+; CHECK-RV64-NEXT:    li a3, 435
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 11
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_951
+; CHECK-RV64-NEXT:    j .LBB61_449
+; CHECK-RV64-NEXT:  .LBB61_951: # %cond.load1741
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 437
+; CHECK-RV64-NEXT:    li a3, 436
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 10
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_952
+; CHECK-RV64-NEXT:    j .LBB61_450
+; CHECK-RV64-NEXT:  .LBB61_952: # %cond.load1745
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 438
+; CHECK-RV64-NEXT:    li a3, 437
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 9
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_953
+; CHECK-RV64-NEXT:    j .LBB61_451
+; CHECK-RV64-NEXT:  .LBB61_953: # %cond.load1749
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 439
+; CHECK-RV64-NEXT:    li a3, 438
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 8
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_954
+; CHECK-RV64-NEXT:    j .LBB61_452
+; CHECK-RV64-NEXT:  .LBB61_954: # %cond.load1753
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 440
+; CHECK-RV64-NEXT:    li a3, 439
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 7
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_955
+; CHECK-RV64-NEXT:    j .LBB61_453
+; CHECK-RV64-NEXT:  .LBB61_955: # %cond.load1757
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 441
+; CHECK-RV64-NEXT:    li a3, 440
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 6
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_956
+; CHECK-RV64-NEXT:    j .LBB61_454
+; CHECK-RV64-NEXT:  .LBB61_956: # %cond.load1761
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 442
+; CHECK-RV64-NEXT:    li a3, 441
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 5
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_957
+; CHECK-RV64-NEXT:    j .LBB61_455
+; CHECK-RV64-NEXT:  .LBB61_957: # %cond.load1765
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 443
+; CHECK-RV64-NEXT:    li a3, 442
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 4
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_958
+; CHECK-RV64-NEXT:    j .LBB61_456
+; CHECK-RV64-NEXT:  .LBB61_958: # %cond.load1769
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 444
+; CHECK-RV64-NEXT:    li a3, 443
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 3
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_959
+; CHECK-RV64-NEXT:    j .LBB61_457
+; CHECK-RV64-NEXT:  .LBB61_959: # %cond.load1773
+; CHECK-RV64-NEXT:    lbu a1, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a1
+; CHECK-RV64-NEXT:    li a1, 445
+; CHECK-RV64-NEXT:    li a3, 444
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a1, a2, 2
+; CHECK-RV64-NEXT:    bgez a1, .LBB61_1031
+; CHECK-RV64-NEXT:    j .LBB61_458
+; CHECK-RV64-NEXT:  .LBB61_1031: # %cond.load1773
+; CHECK-RV64-NEXT:    j .LBB61_459
+; CHECK-RV64-NEXT:  .LBB61_960: # %cond.load1785
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 448
+; CHECK-RV64-NEXT:    li a3, 447
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_961
+; CHECK-RV64-NEXT:    j .LBB61_463
+; CHECK-RV64-NEXT:  .LBB61_961: # %cond.load1789
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 449
+; CHECK-RV64-NEXT:    li a3, 448
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 2
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_962
+; CHECK-RV64-NEXT:    j .LBB61_464
+; CHECK-RV64-NEXT:  .LBB61_962: # %cond.load1793
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 450
+; CHECK-RV64-NEXT:    li a3, 449
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 4
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_963
+; CHECK-RV64-NEXT:    j .LBB61_465
+; CHECK-RV64-NEXT:  .LBB61_963: # %cond.load1797
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 451
+; CHECK-RV64-NEXT:    li a3, 450
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 8
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_964
+; CHECK-RV64-NEXT:    j .LBB61_466
+; CHECK-RV64-NEXT:  .LBB61_964: # %cond.load1801
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 452
+; CHECK-RV64-NEXT:    li a3, 451
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 16
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_965
+; CHECK-RV64-NEXT:    j .LBB61_467
+; CHECK-RV64-NEXT:  .LBB61_965: # %cond.load1805
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 453
+; CHECK-RV64-NEXT:    li a3, 452
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 32
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_966
+; CHECK-RV64-NEXT:    j .LBB61_468
+; CHECK-RV64-NEXT:  .LBB61_966: # %cond.load1809
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 454
+; CHECK-RV64-NEXT:    li a3, 453
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 64
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_967
+; CHECK-RV64-NEXT:    j .LBB61_469
+; CHECK-RV64-NEXT:  .LBB61_967: # %cond.load1813
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 455
+; CHECK-RV64-NEXT:    li a3, 454
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 128
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_968
+; CHECK-RV64-NEXT:    j .LBB61_470
+; CHECK-RV64-NEXT:  .LBB61_968: # %cond.load1817
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 456
+; CHECK-RV64-NEXT:    li a3, 455
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 256
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_969
+; CHECK-RV64-NEXT:    j .LBB61_471
+; CHECK-RV64-NEXT:  .LBB61_969: # %cond.load1821
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 457
+; CHECK-RV64-NEXT:    li a3, 456
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 512
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_970
+; CHECK-RV64-NEXT:    j .LBB61_472
+; CHECK-RV64-NEXT:  .LBB61_970: # %cond.load1825
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 458
+; CHECK-RV64-NEXT:    li a3, 457
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    andi a2, a1, 1024
+; CHECK-RV64-NEXT:    bnez a2, .LBB61_971
+; CHECK-RV64-NEXT:    j .LBB61_473
+; CHECK-RV64-NEXT:  .LBB61_971: # %cond.load1829
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 459
+; CHECK-RV64-NEXT:    li a3, 458
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 52
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_972
+; CHECK-RV64-NEXT:    j .LBB61_474
+; CHECK-RV64-NEXT:  .LBB61_972: # %cond.load1833
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 460
+; CHECK-RV64-NEXT:    li a3, 459
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 51
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_973
+; CHECK-RV64-NEXT:    j .LBB61_475
+; CHECK-RV64-NEXT:  .LBB61_973: # %cond.load1837
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 461
+; CHECK-RV64-NEXT:    li a3, 460
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 50
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_974
+; CHECK-RV64-NEXT:    j .LBB61_476
+; CHECK-RV64-NEXT:  .LBB61_974: # %cond.load1841
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 462
+; CHECK-RV64-NEXT:    li a3, 461
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 49
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_975
+; CHECK-RV64-NEXT:    j .LBB61_477
+; CHECK-RV64-NEXT:  .LBB61_975: # %cond.load1845
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 463
+; CHECK-RV64-NEXT:    li a3, 462
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 48
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_976
+; CHECK-RV64-NEXT:    j .LBB61_478
+; CHECK-RV64-NEXT:  .LBB61_976: # %cond.load1849
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 464
+; CHECK-RV64-NEXT:    li a3, 463
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 47
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_977
+; CHECK-RV64-NEXT:    j .LBB61_479
+; CHECK-RV64-NEXT:  .LBB61_977: # %cond.load1853
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 465
+; CHECK-RV64-NEXT:    li a3, 464
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 46
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_978
+; CHECK-RV64-NEXT:    j .LBB61_480
+; CHECK-RV64-NEXT:  .LBB61_978: # %cond.load1857
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 466
+; CHECK-RV64-NEXT:    li a3, 465
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 45
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_979
+; CHECK-RV64-NEXT:    j .LBB61_481
+; CHECK-RV64-NEXT:  .LBB61_979: # %cond.load1861
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 467
+; CHECK-RV64-NEXT:    li a3, 466
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 44
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_980
+; CHECK-RV64-NEXT:    j .LBB61_482
+; CHECK-RV64-NEXT:  .LBB61_980: # %cond.load1865
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 468
+; CHECK-RV64-NEXT:    li a3, 467
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 43
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_981
+; CHECK-RV64-NEXT:    j .LBB61_483
+; CHECK-RV64-NEXT:  .LBB61_981: # %cond.load1869
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 469
+; CHECK-RV64-NEXT:    li a3, 468
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 42
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_982
+; CHECK-RV64-NEXT:    j .LBB61_484
+; CHECK-RV64-NEXT:  .LBB61_982: # %cond.load1873
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 470
+; CHECK-RV64-NEXT:    li a3, 469
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 41
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_983
+; CHECK-RV64-NEXT:    j .LBB61_485
+; CHECK-RV64-NEXT:  .LBB61_983: # %cond.load1877
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 471
+; CHECK-RV64-NEXT:    li a3, 470
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 40
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_984
+; CHECK-RV64-NEXT:    j .LBB61_486
+; CHECK-RV64-NEXT:  .LBB61_984: # %cond.load1881
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 472
+; CHECK-RV64-NEXT:    li a3, 471
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 39
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_985
+; CHECK-RV64-NEXT:    j .LBB61_487
+; CHECK-RV64-NEXT:  .LBB61_985: # %cond.load1885
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 473
+; CHECK-RV64-NEXT:    li a3, 472
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 38
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_986
+; CHECK-RV64-NEXT:    j .LBB61_488
+; CHECK-RV64-NEXT:  .LBB61_986: # %cond.load1889
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 474
+; CHECK-RV64-NEXT:    li a3, 473
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 37
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_987
+; CHECK-RV64-NEXT:    j .LBB61_489
+; CHECK-RV64-NEXT:  .LBB61_987: # %cond.load1893
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 475
+; CHECK-RV64-NEXT:    li a3, 474
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 36
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_988
+; CHECK-RV64-NEXT:    j .LBB61_490
+; CHECK-RV64-NEXT:  .LBB61_988: # %cond.load1897
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 476
+; CHECK-RV64-NEXT:    li a3, 475
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 35
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_989
+; CHECK-RV64-NEXT:    j .LBB61_491
+; CHECK-RV64-NEXT:  .LBB61_989: # %cond.load1901
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 477
+; CHECK-RV64-NEXT:    li a3, 476
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 34
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_990
+; CHECK-RV64-NEXT:    j .LBB61_492
+; CHECK-RV64-NEXT:  .LBB61_990: # %cond.load1905
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 478
+; CHECK-RV64-NEXT:    li a3, 477
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 33
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_991
+; CHECK-RV64-NEXT:    j .LBB61_493
+; CHECK-RV64-NEXT:  .LBB61_991: # %cond.load1909
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 479
+; CHECK-RV64-NEXT:    li a3, 478
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 32
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_992
+; CHECK-RV64-NEXT:    j .LBB61_494
+; CHECK-RV64-NEXT:  .LBB61_992: # %cond.load1913
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 480
+; CHECK-RV64-NEXT:    li a3, 479
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 31
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_993
+; CHECK-RV64-NEXT:    j .LBB61_495
+; CHECK-RV64-NEXT:  .LBB61_993: # %cond.load1917
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 481
+; CHECK-RV64-NEXT:    li a3, 480
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 30
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_994
+; CHECK-RV64-NEXT:    j .LBB61_496
+; CHECK-RV64-NEXT:  .LBB61_994: # %cond.load1921
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 482
+; CHECK-RV64-NEXT:    li a3, 481
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 29
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_995
+; CHECK-RV64-NEXT:    j .LBB61_497
+; CHECK-RV64-NEXT:  .LBB61_995: # %cond.load1925
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 483
+; CHECK-RV64-NEXT:    li a3, 482
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 28
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_996
+; CHECK-RV64-NEXT:    j .LBB61_498
+; CHECK-RV64-NEXT:  .LBB61_996: # %cond.load1929
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 484
+; CHECK-RV64-NEXT:    li a3, 483
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 27
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_997
+; CHECK-RV64-NEXT:    j .LBB61_499
+; CHECK-RV64-NEXT:  .LBB61_997: # %cond.load1933
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 485
+; CHECK-RV64-NEXT:    li a3, 484
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 26
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_998
+; CHECK-RV64-NEXT:    j .LBB61_500
+; CHECK-RV64-NEXT:  .LBB61_998: # %cond.load1937
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 486
+; CHECK-RV64-NEXT:    li a3, 485
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 25
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_999
+; CHECK-RV64-NEXT:    j .LBB61_501
+; CHECK-RV64-NEXT:  .LBB61_999: # %cond.load1941
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 487
+; CHECK-RV64-NEXT:    li a3, 486
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 24
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1000
+; CHECK-RV64-NEXT:    j .LBB61_502
+; CHECK-RV64-NEXT:  .LBB61_1000: # %cond.load1945
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 488
+; CHECK-RV64-NEXT:    li a3, 487
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 23
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1001
+; CHECK-RV64-NEXT:    j .LBB61_503
+; CHECK-RV64-NEXT:  .LBB61_1001: # %cond.load1949
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 489
+; CHECK-RV64-NEXT:    li a3, 488
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 22
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1002
+; CHECK-RV64-NEXT:    j .LBB61_504
+; CHECK-RV64-NEXT:  .LBB61_1002: # %cond.load1953
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 490
+; CHECK-RV64-NEXT:    li a3, 489
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 21
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1003
+; CHECK-RV64-NEXT:    j .LBB61_505
+; CHECK-RV64-NEXT:  .LBB61_1003: # %cond.load1957
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 491
+; CHECK-RV64-NEXT:    li a3, 490
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 20
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1004
+; CHECK-RV64-NEXT:    j .LBB61_506
+; CHECK-RV64-NEXT:  .LBB61_1004: # %cond.load1961
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 492
+; CHECK-RV64-NEXT:    li a3, 491
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 19
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1005
+; CHECK-RV64-NEXT:    j .LBB61_507
+; CHECK-RV64-NEXT:  .LBB61_1005: # %cond.load1965
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 493
+; CHECK-RV64-NEXT:    li a3, 492
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 18
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1006
+; CHECK-RV64-NEXT:    j .LBB61_508
+; CHECK-RV64-NEXT:  .LBB61_1006: # %cond.load1969
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 494
+; CHECK-RV64-NEXT:    li a3, 493
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 17
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1007
+; CHECK-RV64-NEXT:    j .LBB61_509
+; CHECK-RV64-NEXT:  .LBB61_1007: # %cond.load1973
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 495
+; CHECK-RV64-NEXT:    li a3, 494
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 16
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1008
+; CHECK-RV64-NEXT:    j .LBB61_510
+; CHECK-RV64-NEXT:  .LBB61_1008: # %cond.load1977
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 496
+; CHECK-RV64-NEXT:    li a3, 495
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 15
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1009
+; CHECK-RV64-NEXT:    j .LBB61_511
+; CHECK-RV64-NEXT:  .LBB61_1009: # %cond.load1981
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 497
+; CHECK-RV64-NEXT:    li a3, 496
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 14
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1010
+; CHECK-RV64-NEXT:    j .LBB61_512
+; CHECK-RV64-NEXT:  .LBB61_1010: # %cond.load1985
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 498
+; CHECK-RV64-NEXT:    li a3, 497
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 13
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1011
+; CHECK-RV64-NEXT:    j .LBB61_513
+; CHECK-RV64-NEXT:  .LBB61_1011: # %cond.load1989
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 499
+; CHECK-RV64-NEXT:    li a3, 498
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 12
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1012
+; CHECK-RV64-NEXT:    j .LBB61_514
+; CHECK-RV64-NEXT:  .LBB61_1012: # %cond.load1993
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 500
+; CHECK-RV64-NEXT:    li a3, 499
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 11
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1013
+; CHECK-RV64-NEXT:    j .LBB61_515
+; CHECK-RV64-NEXT:  .LBB61_1013: # %cond.load1997
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 501
+; CHECK-RV64-NEXT:    li a3, 500
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 10
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1014
+; CHECK-RV64-NEXT:    j .LBB61_516
+; CHECK-RV64-NEXT:  .LBB61_1014: # %cond.load2001
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 502
+; CHECK-RV64-NEXT:    li a3, 501
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 9
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1015
+; CHECK-RV64-NEXT:    j .LBB61_517
+; CHECK-RV64-NEXT:  .LBB61_1015: # %cond.load2005
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 503
+; CHECK-RV64-NEXT:    li a3, 502
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 8
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1016
+; CHECK-RV64-NEXT:    j .LBB61_518
+; CHECK-RV64-NEXT:  .LBB61_1016: # %cond.load2009
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 504
+; CHECK-RV64-NEXT:    li a3, 503
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 7
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1017
+; CHECK-RV64-NEXT:    j .LBB61_519
+; CHECK-RV64-NEXT:  .LBB61_1017: # %cond.load2013
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 505
+; CHECK-RV64-NEXT:    li a3, 504
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 6
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1018
+; CHECK-RV64-NEXT:    j .LBB61_520
+; CHECK-RV64-NEXT:  .LBB61_1018: # %cond.load2017
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 506
+; CHECK-RV64-NEXT:    li a3, 505
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 5
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1019
+; CHECK-RV64-NEXT:    j .LBB61_521
+; CHECK-RV64-NEXT:  .LBB61_1019: # %cond.load2021
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 507
+; CHECK-RV64-NEXT:    li a3, 506
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 4
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1020
+; CHECK-RV64-NEXT:    j .LBB61_522
+; CHECK-RV64-NEXT:  .LBB61_1020: # %cond.load2025
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 508
+; CHECK-RV64-NEXT:    li a3, 507
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 3
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1021
+; CHECK-RV64-NEXT:    j .LBB61_523
+; CHECK-RV64-NEXT:  .LBB61_1021: # %cond.load2029
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 509
+; CHECK-RV64-NEXT:    li a3, 508
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 2
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1022
+; CHECK-RV64-NEXT:    j .LBB61_524
+; CHECK-RV64-NEXT:  .LBB61_1022: # %cond.load2033
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 510
+; CHECK-RV64-NEXT:    li a3, 509
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    slli a2, a1, 1
+; CHECK-RV64-NEXT:    bltz a2, .LBB61_1023
+; CHECK-RV64-NEXT:    j .LBB61_525
+; CHECK-RV64-NEXT:  .LBB61_1023: # %cond.load2037
+; CHECK-RV64-NEXT:    lbu a2, 0(a0)
+; CHECK-RV64-NEXT:    li a3, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a3, e8, m1, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a2
+; CHECK-RV64-NEXT:    li a2, 511
+; CHECK-RV64-NEXT:    li a3, 510
+; CHECK-RV64-NEXT:    vsetvli zero, a2, e8, m8, tu, ma
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a3
+; CHECK-RV64-NEXT:    addi a0, a0, 1
+; CHECK-RV64-NEXT:    bltz a1, .LBB61_1024
+; CHECK-RV64-NEXT:    j .LBB61_526
+; CHECK-RV64-NEXT:  .LBB61_1024: # %cond.load2041
+; CHECK-RV64-NEXT:    lbu a0, 0(a0)
+; CHECK-RV64-NEXT:    li a1, 512
+; CHECK-RV64-NEXT:    vsetvli zero, a1, e8, m8, ta, ma
+; CHECK-RV64-NEXT:    vmv.s.x v16, a0
+; CHECK-RV64-NEXT:    li a0, 511
+; CHECK-RV64-NEXT:    vslideup.vx v8, v16, a0
+; CHECK-RV64-NEXT:    ret
   %res = call <512 x i8> @llvm.masked.expandload.v512i8(ptr align 1 %base, <512 x i1> %mask, <512 x i8> %passthru)
   ret <512 x i8> %res
 }
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
index c538a6de7bef3b..479438da677c1d 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-fp.ll
@@ -1,348 +1,200 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
 ; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32
 ; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV64
-; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV32
-; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+f,+d,+zfh,+zvfh,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV64
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64
 
 declare <1 x half> @llvm.masked.expandload.v1f16(ptr, <1 x i1>, <1 x half>)
 define <1 x half> @expandload_v1f16(ptr %base, <1 x half> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1f16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v1f16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v1f16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x half> @llvm.masked.expandload.v1f16(ptr align 2 %base, <1 x i1> %mask, <1 x half> %src0)
   ret <1 x half>%res
 }
 
 declare <2 x half> @llvm.masked.expandload.v2f16(ptr, <2 x i1>, <2 x half>)
 define <2 x half> @expandload_v2f16(ptr %base, <2 x half> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2f16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v2f16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v2f16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x half> @llvm.masked.expandload.v2f16(ptr align 2 %base, <2 x i1> %mask, <2 x half> %src0)
   ret <2 x half>%res
 }
 
 declare <4 x half> @llvm.masked.expandload.v4f16(ptr, <4 x i1>, <4 x half>)
 define <4 x half> @expandload_v4f16(ptr %base, <4 x half> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4f16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v4f16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v4f16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x half> @llvm.masked.expandload.v4f16(ptr align 2 %base, <4 x i1> %mask, <4 x half> %src0)
   ret <4 x half>%res
 }
 
 declare <8 x half> @llvm.masked.expandload.v8f16(ptr, <8 x i1>, <8 x half>)
 define <8 x half> @expandload_v8f16(ptr %base, <8 x half> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8f16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v8f16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v8f16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x half> @llvm.masked.expandload.v8f16(ptr align 2 %base, <8 x i1> %mask, <8 x half> %src0)
   ret <8 x half>%res
 }
 
 declare <1 x float> @llvm.masked.expandload.v1f32(ptr, <1 x i1>, <1 x float>)
 define <1 x float> @expandload_v1f32(ptr %base, <1 x float> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1f32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v1f32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v1f32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x float> @llvm.masked.expandload.v1f32(ptr align 4 %base, <1 x i1> %mask, <1 x float> %src0)
   ret <1 x float>%res
 }
 
 declare <2 x float> @llvm.masked.expandload.v2f32(ptr, <2 x i1>, <2 x float>)
 define <2 x float> @expandload_v2f32(ptr %base, <2 x float> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2f32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v2f32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v2f32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x float> @llvm.masked.expandload.v2f32(ptr align 4 %base, <2 x i1> %mask, <2 x float> %src0)
   ret <2 x float>%res
 }
 
 declare <4 x float> @llvm.masked.expandload.v4f32(ptr, <4 x i1>, <4 x float>)
 define <4 x float> @expandload_v4f32(ptr %base, <4 x float> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4f32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v4f32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v4f32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x float> @llvm.masked.expandload.v4f32(ptr align 4 %base, <4 x i1> %mask, <4 x float> %src0)
   ret <4 x float>%res
 }
 
 declare <8 x float> @llvm.masked.expandload.v8f32(ptr, <8 x i1>, <8 x float>)
 define <8 x float> @expandload_v8f32(ptr %base, <8 x float> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8f32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v8f32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v8f32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-NEXT:    vle32.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x float> @llvm.masked.expandload.v8f32(ptr align 4 %base, <8 x i1> %mask, <8 x float> %src0)
   ret <8 x float>%res
 }
 
 declare <1 x double> @llvm.masked.expandload.v1f64(ptr, <1 x i1>, <1 x double>)
 define <1 x double> @expandload_v1f64(ptr %base, <1 x double> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1f64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v1f64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v1f64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v1f64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x double> @llvm.masked.expandload.v1f64(ptr align 8 %base, <1 x i1> %mask, <1 x double> %src0)
   ret <1 x double>%res
 }
 
 declare <2 x double> @llvm.masked.expandload.v2f64(ptr, <2 x i1>, <2 x double>)
 define <2 x double> @expandload_v2f64(ptr %base, <2 x double> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2f64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v2f64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v2f64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v2f64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x double> @llvm.masked.expandload.v2f64(ptr align 8 %base, <2 x i1> %mask, <2 x double> %src0)
   ret <2 x double>%res
 }
 
 declare <4 x double> @llvm.masked.expandload.v4f64(ptr, <4 x i1>, <4 x double>)
 define <4 x double> @expandload_v4f64(ptr %base, <4 x double> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4f64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v4f64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v4f64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v4f64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-NEXT:    vle64.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x double> @llvm.masked.expandload.v4f64(ptr align 8 %base, <4 x i1> %mask, <4 x double> %src0)
   ret <4 x double>%res
 }
 
 declare <8 x double> @llvm.masked.expandload.v8f64(ptr, <8 x i1>, <8 x double>)
 define <8 x double> @expandload_v8f64(ptr %base, <8 x double> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8f64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v8f64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v8f64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v8f64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-NEXT:    vle64.v v12, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x double> @llvm.masked.expandload.v8f64(ptr align 8 %base, <8 x i1> %mask, <8 x double> %src0)
   ret <8 x double>%res
 }
 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
-; CHECK-VRGATHER-RV32: {{.*}}
-; CHECK-VRGATHER-RV64: {{.*}}
+; CHECK-RV32: {{.*}}
+; CHECK-RV64: {{.*}}
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
index 0fd990fc71344c..269d3df00f05db 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-expandload-int.ll
@@ -1,440 +1,264 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
 ; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV32
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV32
 ; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-VRGATHER,CHECK-VRGATHER-RV64
-; RUN: llc -verify-machineinstrs -mtriple=riscv32 -mattr=+m,+v,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV32
-; RUN: llc -verify-machineinstrs -mtriple=riscv64 -mattr=+m,+v,+optimized-indexed-load-store %s -o - \
-; RUN:   | FileCheck %s --check-prefixes=CHECK-INDEXED,CHECK-INDEXED-RV64
+; RUN:   | FileCheck %s --check-prefixes=CHECK,CHECK-RV64
 
 declare <1 x i8> @llvm.masked.expandload.v1i8(ptr, <1 x i1>, <1 x i8>)
 define <1 x i8> @expandload_v1i8(ptr %base, <1 x i8> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v1i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v1i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i8> @llvm.masked.expandload.v1i8(ptr %base, <1 x i1> %mask, <1 x i8> %src0)
   ret <1 x i8>%res
 }
 
 declare <2 x i8> @llvm.masked.expandload.v2i8(ptr, <2 x i1>, <2 x i8>)
 define <2 x i8> @expandload_v2i8(ptr %base, <2 x i8> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v2i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v2i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf8, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i8> @llvm.masked.expandload.v2i8(ptr %base, <2 x i1> %mask, <2 x i8> %src0)
   ret <2 x i8>%res
 }
 
 declare <4 x i8> @llvm.masked.expandload.v4i8(ptr, <4 x i1>, <4 x i8>)
 define <4 x i8> @expandload_v4i8(ptr %base, <4 x i8> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v4i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v4i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf4, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i8> @llvm.masked.expandload.v4i8(ptr %base, <4 x i1> %mask, <4 x i8> %src0)
   ret <4 x i8>%res
 }
 
 declare <8 x i8> @llvm.masked.expandload.v8i8(ptr, <8 x i1>, <8 x i8>)
 define <8 x i8> @expandload_v8i8(ptr %base, <8 x i8> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8i8:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle8.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v8i8:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vluxei8.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v8i8:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e8, mf2, ta, ma
+; CHECK-NEXT:    vle8.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i8> @llvm.masked.expandload.v8i8(ptr %base, <8 x i1> %mask, <8 x i8> %src0)
   ret <8 x i8>%res
 }
 
 declare <1 x i16> @llvm.masked.expandload.v1i16(ptr, <1 x i1>, <1 x i16>)
 define <1 x i16> @expandload_v1i16(ptr %base, <1 x i16> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v1i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v1i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i16> @llvm.masked.expandload.v1i16(ptr align 2 %base, <1 x i1> %mask, <1 x i16> %src0)
   ret <1 x i16>%res
 }
 
 declare <2 x i16> @llvm.masked.expandload.v2i16(ptr, <2 x i1>, <2 x i16>)
 define <2 x i16> @expandload_v2i16(ptr %base, <2 x i16> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v2i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf4, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v2i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf4, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i16> @llvm.masked.expandload.v2i16(ptr align 2 %base, <2 x i1> %mask, <2 x i16> %src0)
   ret <2 x i16>%res
 }
 
 declare <4 x i16> @llvm.masked.expandload.v4i16(ptr, <4 x i1>, <4 x i16>)
 define <4 x i16> @expandload_v4i16(ptr %base, <4 x i16> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v4i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v4i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, mf2, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i16> @llvm.masked.expandload.v4i16(ptr align 2 %base, <4 x i1> %mask, <4 x i16> %src0)
   ret <4 x i16>%res
 }
 
 declare <8 x i16> @llvm.masked.expandload.v8i16(ptr, <8 x i1>, <8 x i16>)
 define <8 x i16> @expandload_v8i16(ptr %base, <8 x i16> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8i16:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle16.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v8i16:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 1, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e16, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei16.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v8i16:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e16, m1, ta, ma
+; CHECK-NEXT:    vle16.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i16> @llvm.masked.expandload.v8i16(ptr align 2 %base, <8 x i1> %mask, <8 x i16> %src0)
   ret <8 x i16>%res
 }
 
 declare <1 x i32> @llvm.masked.expandload.v1i32(ptr, <1 x i1>, <1 x i32>)
 define <1 x i32> @expandload_v1i32(ptr %base, <1 x i32> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v1i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v1i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i32> @llvm.masked.expandload.v1i32(ptr align 4 %base, <1 x i1> %mask, <1 x i32> %src0)
   ret <1 x i32>%res
 }
 
 declare <2 x i32> @llvm.masked.expandload.v2i32(ptr, <2 x i1>, <2 x i32>)
 define <2 x i32> @expandload_v2i32(ptr %base, <2 x i32> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v2i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, mf2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v2i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, mf2, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i32> @llvm.masked.expandload.v2i32(ptr align 4 %base, <2 x i1> %mask, <2 x i32> %src0)
   ret <2 x i32>%res
 }
 
 declare <4 x i32> @llvm.masked.expandload.v4i32(ptr, <4 x i1>, <4 x i32>)
 define <4 x i32> @expandload_v4i32(ptr %base, <4 x i32> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v4i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v9, v9, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m1, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v4i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m1, ta, ma
+; CHECK-NEXT:    vle32.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i32> @llvm.masked.expandload.v4i32(ptr align 4 %base, <4 x i1> %mask, <4 x i32> %src0)
   ret <4 x i32>%res
 }
 
 declare <8 x i32> @llvm.masked.expandload.v8i32(ptr, <8 x i1>, <8 x i32>)
 define <8 x i32> @expandload_v8i32(ptr %base, <8 x i32> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8i32:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle32.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-LABEL: expandload_v8i32:
-; CHECK-INDEXED:       # %bb.0:
-; CHECK-INDEXED-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-NEXT:    vsll.vi v10, v10, 2, v0.t
-; CHECK-INDEXED-NEXT:    vsetvli zero, zero, e32, m2, ta, mu
-; CHECK-INDEXED-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-NEXT:    ret
+; CHECK-LABEL: expandload_v8i32:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e32, m2, ta, ma
+; CHECK-NEXT:    vle32.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i32> @llvm.masked.expandload.v8i32(ptr align 4 %base, <8 x i1> %mask, <8 x i32> %src0)
   ret <8 x i32>%res
 }
 
 declare <1 x i64> @llvm.masked.expandload.v1i64(ptr, <1 x i1>, <1 x i64>)
 define <1 x i64> @expandload_v1i64(ptr %base, <1 x i64> %src0, <1 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v1i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v1i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v1i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v1i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <1 x i64> @llvm.masked.expandload.v1i64(ptr align 8 %base, <1 x i1> %mask, <1 x i64> %src0)
   ret <1 x i64>%res
 }
 
 declare <2 x i64> @llvm.masked.expandload.v2i64(ptr, <2 x i1>, <2 x i64>)
 define <2 x i64> @expandload_v2i64(ptr %base, <2 x i64> %src0, <2 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v2i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v9, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v10, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v9, v10, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v2i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v2i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v9, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v9, v9, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m1, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v9, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v2i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m1, ta, ma
+; CHECK-NEXT:    vle64.v v9, (a0)
+; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, mu
+; CHECK-NEXT:    viota.m v10, v0
+; CHECK-NEXT:    vrgather.vv v8, v9, v10, v0.t
+; CHECK-NEXT:    ret
   %res = call <2 x i64> @llvm.masked.expandload.v2i64(ptr align 8 %base, <2 x i1> %mask, <2 x i64> %src0)
   ret <2 x i64>%res
 }
 
 declare <4 x i64> @llvm.masked.expandload.v4i64(ptr, <4 x i1>, <4 x i64>)
 define <4 x i64> @expandload_v4i64(ptr %base, <4 x i64> %src0, <4 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v4i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v10, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v12, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v10, v12, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v4i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v4i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v10, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v10, v10, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m2, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v10, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v4i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m2, ta, ma
+; CHECK-NEXT:    vle64.v v10, (a0)
+; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, mu
+; CHECK-NEXT:    viota.m v12, v0
+; CHECK-NEXT:    vrgather.vv v8, v10, v12, v0.t
+; CHECK-NEXT:    ret
   %res = call <4 x i64> @llvm.masked.expandload.v4i64(ptr align 8 %base, <4 x i1> %mask, <4 x i64> %src0)
   ret <4 x i64>%res
 }
 
 declare <8 x i64> @llvm.masked.expandload.v8i64(ptr, <8 x i1>, <8 x i64>)
 define <8 x i64> @expandload_v8i64(ptr %base, <8 x i64> %src0, <8 x i1> %mask) {
-; CHECK-VRGATHER-LABEL: expandload_v8i64:
-; CHECK-VRGATHER:       # %bb.0:
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
-; CHECK-VRGATHER-NEXT:    vcpop.m a1, v0
-; CHECK-VRGATHER-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
-; CHECK-VRGATHER-NEXT:    vle64.v v12, (a0)
-; CHECK-VRGATHER-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
-; CHECK-VRGATHER-NEXT:    viota.m v16, v0
-; CHECK-VRGATHER-NEXT:    vrgather.vv v8, v12, v16, v0.t
-; CHECK-VRGATHER-NEXT:    ret
-;
-; CHECK-INDEXED-RV32-LABEL: expandload_v8i64:
-; CHECK-INDEXED-RV32:       # %bb.0:
-; CHECK-INDEXED-RV32-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
-; CHECK-INDEXED-RV32-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV32-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV32-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV32-NEXT:    vluxei32.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV32-NEXT:    ret
-;
-; CHECK-INDEXED-RV64-LABEL: expandload_v8i64:
-; CHECK-INDEXED-RV64:       # %bb.0:
-; CHECK-INDEXED-RV64-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
-; CHECK-INDEXED-RV64-NEXT:    viota.m v12, v0
-; CHECK-INDEXED-RV64-NEXT:    vsll.vi v12, v12, 3, v0.t
-; CHECK-INDEXED-RV64-NEXT:    vsetvli zero, zero, e64, m4, ta, mu
-; CHECK-INDEXED-RV64-NEXT:    vluxei64.v v8, (a0), v12, v0.t
-; CHECK-INDEXED-RV64-NEXT:    ret
+; CHECK-LABEL: expandload_v8i64:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
+; CHECK-NEXT:    vcpop.m a1, v0
+; CHECK-NEXT:    vsetvli zero, a1, e64, m4, ta, ma
+; CHECK-NEXT:    vle64.v v12, (a0)
+; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, mu
+; CHECK-NEXT:    viota.m v16, v0
+; CHECK-NEXT:    vrgather.vv v8, v12, v16, v0.t
+; CHECK-NEXT:    ret
   %res = call <8 x i64> @llvm.masked.expandload.v8i64(ptr align 8 %base, <8 x i1> %mask, <8 x i64> %src0)
   ret <8 x i64>%res
 }
 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
-; CHECK-VRGATHER-RV32: {{.*}}
-; CHECK-VRGATHER-RV64: {{.*}}
+; CHECK-RV32: {{.*}}
+; CHECK-RV64: {{.*}}

>From 0e35b2cae23d906189c6e68d1e5a96e62f336d23 Mon Sep 17 00:00:00 2001
From: Wang Pengcheng <wangpengcheng.pp at bytedance.com>
Date: Wed, 30 Oct 2024 16:41:07 +0800
Subject: [PATCH 15/15] clang-format

---
 llvm/lib/Target/RISCV/RISCVISelLowering.cpp | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 7c16798df638b9..c15648c7b4f099 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -11144,9 +11144,8 @@ SDValue RISCVTargetLowering::lowerMaskedLoad(SDValue Op,
                     getAllOnesMask(Mask.getSimpleValueType(), VL, DL, DAG), VL);
   }
 
-  unsigned IntID = IsUnmasked || IsExpandingLoad
-                       ? Intrinsic::riscv_vle
-                       : Intrinsic::riscv_vle_mask;
+  unsigned IntID = IsUnmasked || IsExpandingLoad ? Intrinsic::riscv_vle
+                                                 : Intrinsic::riscv_vle_mask;
   SmallVector<SDValue, 8> Ops{Chain, DAG.getTargetConstant(IntID, DL, XLenVT)};
   if (IntID == Intrinsic::riscv_vle)
     Ops.push_back(DAG.getUNDEF(ContainerVT));



More information about the llvm-commits mailing list