[clang] deca5e8 - [RISCV] Support vreinterpret intrinsics between vector boolean type and m1 vector integer type

via cfe-commits cfe-commits at lists.llvm.org
Mon May 15 18:04:30 PDT 2023


Author: eopXD
Date: 2023-05-15T18:04:24-07:00
New Revision: deca5e8f5038e70cbf2349e5df91109cae214ee7

URL: https://github.com/llvm/llvm-project/commit/deca5e8f5038e70cbf2349e5df91109cae214ee7
DIFF: https://github.com/llvm/llvm-project/commit/deca5e8f5038e70cbf2349e5df91109cae214ee7.diff

LOG: [RISCV] Support vreinterpret intrinsics between vector boolean type and m1 vector integer type

Link to specification: [riscv-non-isa/rvv-intrinsic-doc#221](https://github.com/riscv-non-isa/rvv-intrinsic-doc/pull/221)

Reviewed By: craig.topper

Differential Revision: https://reviews.llvm.org/D149642
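
For reference, a minimal usage sketch of the new conversions, using the
intrinsic names exercised by the tests added below (for a RISC-V target with
the V extension):

    #include <riscv_vector.h>

    // LMUL=1 integer vector -> mask view: lowered as a bitcast to
    // <vscale x 64 x i1> followed by llvm.vector.extract.
    vbool8_t int_to_mask(vint8m1_t v) {
      return __riscv_vreinterpret_v_i8m1_b8(v);
    }

    // Mask -> LMUL=1 integer vector: llvm.vector.insert into a poison
    // <vscale x 64 x i1>, then a bitcast.
    vint8m1_t mask_to_int(vbool8_t m) {
      return __riscv_vreinterpret_v_b8_i8m1(m);
    }

    // The overloaded spelling only names the destination, e.g.
    // __riscv_vreinterpret_b64(v) reinterprets vint8m1_t as vbool64_t.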

Added: 
    

Modified: 
    clang/include/clang/Basic/riscv_vector.td
    clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vreinterpret.c
    clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vreinterpret.c

Removed: 
    


################################################################################
diff --git a/clang/include/clang/Basic/riscv_vector.td b/clang/include/clang/Basic/riscv_vector.td
index 8eb873dfafc79..b56ee074c20ca 100644
--- a/clang/include/clang/Basic/riscv_vector.td
+++ b/clang/include/clang/Basic/riscv_vector.td
@@ -2030,6 +2030,46 @@ let HasMasked = false,
 let HasMasked = false, HasVL = false, IRName = "" in {
   let Name = "vreinterpret_v", MaskedPolicyScheme = NonePolicy,
       ManualCodegen = [{
+        if (ResultType->isIntOrIntVectorTy(1) ||
+            Ops[0]->getType()->isIntOrIntVectorTy(1)) {
+          assert(isa<ScalableVectorType>(ResultType) &&
+                 isa<ScalableVectorType>(Ops[0]->getType()));
+
+          LLVMContext &Context = CGM.getLLVMContext();
+          ScalableVectorType *Boolean64Ty =
+              ScalableVectorType::get(llvm::Type::getInt1Ty(Context), 64);
+
+          if (ResultType->isIntOrIntVectorTy(1)) {
+            // Casting from m1 vector integer -> vector boolean
+            // Ex: <vscale x 8 x i8>
+            //     --(bitcast)--------> <vscale x 64 x i1>
+            //     --(vector_extract)-> <vscale x  8 x i1>
+            llvm::Value *BitCast = Builder.CreateBitCast(Ops[0], Boolean64Ty);
+
+            ID = Intrinsic::vector_extract;
+            llvm::Value *Operands[2];
+            Operands[0] = BitCast;
+            Operands[1] = ConstantInt::get(Int64Ty, 0);
+            IntrinsicTypes = {ResultType, Boolean64Ty};
+
+            return Builder.CreateCall(CGM.getIntrinsic(ID, IntrinsicTypes), Operands, "");
+          } else {
+            // Casting from vector boolean -> m1 vector integer
+            // Ex: <vscale x  1 x i1>
+            //       --(vector_insert)-> <vscale x 64 x i1>
+            //       --(bitcast)-------> <vscale x  8 x i8>
+            ID = Intrinsic::vector_insert;
+            llvm::Value *Operands[3];
+            Operands[0] = llvm::PoisonValue::get(Boolean64Ty);
+            Operands[1] = Ops[0];
+            Operands[2] = ConstantInt::get(Int64Ty, 0);
+            IntrinsicTypes = {Boolean64Ty, Ops[0]->getType()};
+            llvm::Value *Boolean64Val =
+              Builder.CreateCall(CGM.getIntrinsic(ID, IntrinsicTypes), Operands, "");
+
+            return Builder.CreateBitCast(Boolean64Val, ResultType);
+          }
+        }
         return Builder.CreateBitCast(Ops[0], ResultType);
       }] in {
 // Reinterpret between different type under the same SEW and LMUL
@@ -2048,6 +2088,53 @@ let HasMasked = false, HasVL = false, IRName = "" in {
       def vreinterpret_u_ # dst_sew : RVVBuiltin<"Uv" # dst_sew # "Uv",
                                                  dst_sew # "UvUv", "csil", dst_sew # "Uv">;
     }
+
+    // Existing users of FixedSEW - the reinterpretation between different SEW
+    // and the same LMUL - rely on the implicit assumption that when FixedSEW
+    // is set to the type's own element width, the type is identified as
+    // invalid, which skips defining a reinterpret from SEW=8 to SEW=8.
+    // However, that assumption blocks our usage here of defining every
+    // combination of a fixed SEW to any boolean type, so the SEW=8 cases are
+    // defined separately here.
+    // Reinterpret from LMUL=1 integer type to vector boolean type
+    def vreinterpret_m1_b8_signed :
+        RVVBuiltin<"Svm",
+                    "mSv",
+                    "c", "m">;
+    def vreinterpret_m1_b8_unsigned :
+        RVVBuiltin<"USvm",
+                    "mUSv",
+                    "c", "m">;
+
+    // Reinterpret from vector boolean type to LMUL=1 integer type
+    def vreinterpret_b8_m1_signed :
+        RVVBuiltin<"mSv",
+                    "Svm",
+                    "c", "Sv">;
+    def vreinterpret_b8_m1_unsigned :
+        RVVBuiltin<"mUSv",
+                    "USvm",
+                    "c", "USv">;
+
+    foreach dst_sew = ["16", "32", "64"] in {
+      // Reinterpret from LMUL=1 integer type to vector boolean type
+      def vreinterpret_m1_b # dst_sew # _signed:
+        RVVBuiltin<"(FixedSEW:" # dst_sew # ")Svm",
+                    "m(FixedSEW:" # dst_sew # ")Sv",
+                    "c", "m">;
+      def vreinterpret_m1_b # dst_sew # _unsigned:
+        RVVBuiltin<"(FixedSEW:" # dst_sew # ")USvm",
+                    "m(FixedSEW:" # dst_sew # ")USv",
+                    "c", "m">;
+      // Reinterpret from vector boolean type to LMUL=1 integer type
+      def vreinterpret_b # dst_sew # _m1_signed:
+        RVVBuiltin<"m(FixedSEW:" # dst_sew # ")Sv",
+                    "(FixedSEW:" # dst_sew # ")Svm",
+                    "c", "(FixedSEW:" # dst_sew # ")Sv">;
+      def vreinterpret_b # dst_sew # _m1_unsigned:
+        RVVBuiltin<"m(FixedSEW:" # dst_sew # ")USv",
+                    "(FixedSEW:" # dst_sew # ")USvm",
+                    "c", "(FixedSEW:" # dst_sew # ")USv">;
+    }
   }
 
   let Name = "vundefined", SupportOverloading = false,

diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vreinterpret.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vreinterpret.c
index 791a4f96a3b99..c41256f2c2a15 100644
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vreinterpret.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vreinterpret.c
@@ -2123,3 +2123,971 @@ vuint32m8_t test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src) {
   return __riscv_vreinterpret_v_u64m8_u32m8(src);
 }
 
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i8m1_b64
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i8m1_b64(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_i8m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b64_i8m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i8m1_b32
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i8m1_b32(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_i8m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b32_i8m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i8m1_b16
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i8m1_b16(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_i8m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b16_i8m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i8m1_b8
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i8m1_b8(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_i8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b8_i8m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i8m1_b4
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i8m1_b4(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_i8m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b4_i8m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i8m1_b2
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_i8m1_b2(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_i8m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b2_i8m1(vbool2_t src) {
+  return __riscv_vreinterpret_v_b2_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_i8m1_b1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+vbool1_t test_vreinterpret_v_i8m1_b1(vint8m1_t src) {
+  return __riscv_vreinterpret_v_i8m1_b1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_i8m1
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b1_i8m1(vbool1_t src) {
+  return __riscv_vreinterpret_v_b1_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u8m1_b64
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u8m1_b64(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_u8m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b64_u8m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u8m1_b32
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u8m1_b32(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_u8m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b32_u8m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u8m1_b16
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u8m1_b16(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_u8m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b16_u8m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u8m1_b8
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u8m1_b8(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b8_u8m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u8m1_b4
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u8m1_b4(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_u8m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b4_u8m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u8m1_b2
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_u8m1_b2(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_u8m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b2_u8m1(vbool2_t src) {
+  return __riscv_vreinterpret_v_b2_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_u8m1_b1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+vbool1_t test_vreinterpret_v_u8m1_b1(vuint8m1_t src) {
+  return __riscv_vreinterpret_v_u8m1_b1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_u8m1
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b1_u8m1(vbool1_t src) {
+  return __riscv_vreinterpret_v_b1_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i16m1_b64
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i16m1_b64(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_i16m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b64_i16m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i16m1_b32
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i16m1_b32(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_i16m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b32_i16m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i16m1_b16
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i16m1_b16(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_i16m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b16_i16m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i16m1_b8
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i16m1_b8(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_i16m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b8_i16m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i16m1_b4
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i16m1_b4(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_i16m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b4_i16m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i16m1_b2
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_i16m1_b2(vint16m1_t src) {
+  return __riscv_vreinterpret_v_i16m1_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_i16m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b2_i16m1(vbool2_t src) {
+  return __riscv_vreinterpret_v_b2_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u16m1_b64
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u16m1_b64(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_u16m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b64_u16m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u16m1_b32
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u16m1_b32(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_u16m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b32_u16m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u16m1_b16
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u16m1_b16(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_u16m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b16_u16m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u16m1_b8
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u16m1_b8(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_u16m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b8_u16m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u16m1_b4
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u16m1_b4(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_u16m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b4_u16m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u16m1_b2
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_u16m1_b2(vuint16m1_t src) {
+  return __riscv_vreinterpret_v_u16m1_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_u16m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b2_u16m1(vbool2_t src) {
+  return __riscv_vreinterpret_v_b2_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i32m1_b64
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i32m1_b64(vint32m1_t src) {
+  return __riscv_vreinterpret_v_i32m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_i32m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b64_i32m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i32m1_b32
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i32m1_b32(vint32m1_t src) {
+  return __riscv_vreinterpret_v_i32m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_i32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b32_i32m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i32m1_b16
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i32m1_b16(vint32m1_t src) {
+  return __riscv_vreinterpret_v_i32m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_i32m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b16_i32m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i32m1_b8
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i32m1_b8(vint32m1_t src) {
+  return __riscv_vreinterpret_v_i32m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_i32m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b8_i32m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i32m1_b4
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i32m1_b4(vint32m1_t src) {
+  return __riscv_vreinterpret_v_i32m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_i32m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b4_i32m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u32m1_b64
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u32m1_b64(vuint32m1_t src) {
+  return __riscv_vreinterpret_v_u32m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_u32m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b64_u32m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u32m1_b32
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u32m1_b32(vuint32m1_t src) {
+  return __riscv_vreinterpret_v_u32m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b32_u32m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u32m1_b16
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u32m1_b16(vuint32m1_t src) {
+  return __riscv_vreinterpret_v_u32m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_u32m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b16_u32m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u32m1_b8
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u32m1_b8(vuint32m1_t src) {
+  return __riscv_vreinterpret_v_u32m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_u32m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b8_u32m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u32m1_b4
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u32m1_b4(vuint32m1_t src) {
+  return __riscv_vreinterpret_v_u32m1_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_u32m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b4_u32m1(vbool4_t src) {
+  return __riscv_vreinterpret_v_b4_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i64m1_b64
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i64m1_b64(vint64m1_t src) {
+  return __riscv_vreinterpret_v_i64m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_i64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b64_i64m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i64m1_b32
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i64m1_b32(vint64m1_t src) {
+  return __riscv_vreinterpret_v_i64m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_i64m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b32_i64m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i64m1_b16
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i64m1_b16(vint64m1_t src) {
+  return __riscv_vreinterpret_v_i64m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_i64m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b16_i64m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i64m1_b8
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i64m1_b8(vint64m1_t src) {
+  return __riscv_vreinterpret_v_i64m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_i64m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b8_i64m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u64m1_b64
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u64m1_b64(vuint64m1_t src) {
+  return __riscv_vreinterpret_v_u64m1_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b64_u64m1(vbool64_t src) {
+  return __riscv_vreinterpret_v_b64_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u64m1_b32
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u64m1_b32(vuint64m1_t src) {
+  return __riscv_vreinterpret_v_u64m1_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_u64m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b32_u64m1(vbool32_t src) {
+  return __riscv_vreinterpret_v_b32_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u64m1_b16
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u64m1_b16(vuint64m1_t src) {
+  return __riscv_vreinterpret_v_u64m1_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_u64m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b16_u64m1(vbool16_t src) {
+  return __riscv_vreinterpret_v_b16_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u64m1_b8
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u64m1_b8(vuint64m1_t src) {
+  return __riscv_vreinterpret_v_u64m1_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_u64m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b8_u64m1(vbool8_t src) {
+  return __riscv_vreinterpret_v_b8_u64m1(src);
+}
+

diff  --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vreinterpret.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vreinterpret.c
index dcd6ef6717458..310ba449c04f9 100644
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vreinterpret.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vreinterpret.c
@@ -2123,3 +2123,971 @@ vuint32m8_t test_vreinterpret_v_u64m8_u32m8(vuint64m8_t src) {
   return __riscv_vreinterpret_u32m8(src);
 }
 
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i8m1_b64
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i8m1_b64(vint8m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_i8m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b64_i8m1(vbool64_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i8m1_b32
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i8m1_b32(vint8m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_i8m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b32_i8m1(vbool32_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i8m1_b16
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i8m1_b16(vint8m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_i8m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b16_i8m1(vbool16_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i8m1_b8
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i8m1_b8(vint8m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_i8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b8_i8m1(vbool8_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i8m1_b4
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i8m1_b4(vint8m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_i8m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b4_i8m1(vbool4_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i8m1_b2
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_i8m1_b2(vint8m1_t src) {
+  return __riscv_vreinterpret_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_i8m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b2_i8m1(vbool2_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_i8m1_b1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+vbool1_t test_vreinterpret_v_i8m1_b1(vint8m1_t src) {
+  return __riscv_vreinterpret_b1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_i8m1
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vint8m1_t test_vreinterpret_v_b1_i8m1(vbool1_t src) {
+  return __riscv_vreinterpret_i8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u8m1_b64
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u8m1_b64(vuint8m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b64_u8m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b64_u8m1(vbool64_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u8m1_b32
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u8m1_b32(vuint8m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b32_u8m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b32_u8m1(vbool32_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u8m1_b16
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u8m1_b16(vuint8m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b16_u8m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b16_u8m1(vbool16_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u8m1_b8
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u8m1_b8(vuint8m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b8_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b8_u8m1(vbool8_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u8m1_b4
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u8m1_b4(vuint8m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b4_u8m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b4_u8m1(vbool4_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u8m1_b2
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_u8m1_b2(vuint8m1_t src) {
+  return __riscv_vreinterpret_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b2_u8m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b2_u8m1(vbool2_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i1> @test_vreinterpret_v_u8m1_b1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 8 x i8> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.vector.extract.nxv64i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+vbool1_t test_vreinterpret_v_u8m1_b1(vuint8m1_t src) {
+  return __riscv_vreinterpret_b1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vreinterpret_v_b1_u8m1
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv64i1(<vscale x 64 x i1> poison, <vscale x 64 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 8 x i8>
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP1]]
+//
+vuint8m1_t test_vreinterpret_v_b1_u8m1(vbool1_t src) {
+  return __riscv_vreinterpret_u8m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i16m1_b64
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i16m1_b64(vint16m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_i16m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b64_i16m1(vbool64_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i16m1_b32
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i16m1_b32(vint16m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_i16m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b32_i16m1(vbool32_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i16m1_b16
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i16m1_b16(vint16m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_i16m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b16_i16m1(vbool16_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i16m1_b8
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i16m1_b8(vint16m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_i16m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b8_i16m1(vbool8_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i16m1_b4
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i16m1_b4(vint16m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_i16m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b4_i16m1(vbool4_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_i16m1_b2
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_i16m1_b2(vint16m1_t src) {
+  return __riscv_vreinterpret_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_i16m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vint16m1_t test_vreinterpret_v_b2_i16m1(vbool2_t src) {
+  return __riscv_vreinterpret_i16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u16m1_b64
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u16m1_b64(vuint16m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b64_u16m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b64_u16m1(vbool64_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u16m1_b32
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u16m1_b32(vuint16m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b32_u16m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b32_u16m1(vbool32_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u16m1_b16
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u16m1_b16(vuint16m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b16_u16m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b16_u16m1(vbool16_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u16m1_b8
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u16m1_b8(vuint16m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b8_u16m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b8_u16m1(vbool8_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u16m1_b4
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u16m1_b4(vuint16m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b4_u16m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b4_u16m1(vbool4_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i1> @test_vreinterpret_v_u16m1_b2
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 4 x i16> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.vector.extract.nxv32i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vreinterpret_v_u16m1_b2(vuint16m1_t src) {
+  return __riscv_vreinterpret_b2(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vreinterpret_v_b2_u16m1
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv32i1(<vscale x 64 x i1> poison, <vscale x 32 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 4 x i16>
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP1]]
+//
+vuint16m1_t test_vreinterpret_v_b2_u16m1(vbool2_t src) {
+  return __riscv_vreinterpret_u16m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i32m1_b64
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i32m1_b64(vint32m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_i32m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b64_i32m1(vbool64_t src) {
+  return __riscv_vreinterpret_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i32m1_b32
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i32m1_b32(vint32m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_i32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b32_i32m1(vbool32_t src) {
+  return __riscv_vreinterpret_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i32m1_b16
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i32m1_b16(vint32m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_i32m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b16_i32m1(vbool16_t src) {
+  return __riscv_vreinterpret_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i32m1_b8
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i32m1_b8(vint32m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_i32m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b8_i32m1(vbool8_t src) {
+  return __riscv_vreinterpret_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_i32m1_b4
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_i32m1_b4(vint32m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_i32m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vreinterpret_v_b4_i32m1(vbool4_t src) {
+  return __riscv_vreinterpret_i32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u32m1_b64
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u32m1_b64(vuint32m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b64_u32m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b64_u32m1(vbool64_t src) {
+  return __riscv_vreinterpret_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u32m1_b32
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u32m1_b32(vuint32m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b32_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b32_u32m1(vbool32_t src) {
+  return __riscv_vreinterpret_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u32m1_b16
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u32m1_b16(vuint32m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b16_u32m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b16_u32m1(vbool16_t src) {
+  return __riscv_vreinterpret_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u32m1_b8
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u32m1_b8(vuint32m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b8_u32m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b8_u32m1(vbool8_t src) {
+  return __riscv_vreinterpret_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i1> @test_vreinterpret_v_u32m1_b4
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 2 x i32> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.vector.extract.nxv16i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vreinterpret_v_u32m1_b4(vuint32m1_t src) {
+  return __riscv_vreinterpret_b4(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vreinterpret_v_b4_u32m1
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv16i1(<vscale x 64 x i1> poison, <vscale x 16 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 2 x i32>
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vreinterpret_v_b4_u32m1(vbool4_t src) {
+  return __riscv_vreinterpret_u32m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_i64m1_b64
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_i64m1_b64(vint64m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_i64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b64_i64m1(vbool64_t src) {
+  return __riscv_vreinterpret_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_i64m1_b32
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_i64m1_b32(vint64m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_i64m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b32_i64m1(vbool32_t src) {
+  return __riscv_vreinterpret_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_i64m1_b16
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_i64m1_b16(vint64m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_i64m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b16_i64m1(vbool16_t src) {
+  return __riscv_vreinterpret_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_i64m1_b8
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_i64m1_b8(vint64m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_i64m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vreinterpret_v_b8_i64m1(vbool8_t src) {
+  return __riscv_vreinterpret_i64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i1> @test_vreinterpret_v_u64m1_b64
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.vector.extract.nxv1i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vreinterpret_v_u64m1_b64(vuint64m1_t src) {
+  return __riscv_vreinterpret_b64(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b64_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv1i1(<vscale x 64 x i1> poison, <vscale x 1 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b64_u64m1(vbool64_t src) {
+  return __riscv_vreinterpret_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i1> @test_vreinterpret_v_u64m1_b32
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.vector.extract.nxv2i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vreinterpret_v_u64m1_b32(vuint64m1_t src) {
+  return __riscv_vreinterpret_b32(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b32_u64m1
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv2i1(<vscale x 64 x i1> poison, <vscale x 2 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b32_u64m1(vbool32_t src) {
+  return __riscv_vreinterpret_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i1> @test_vreinterpret_v_u64m1_b16
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.vector.extract.nxv4i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vreinterpret_v_u64m1_b16(vuint64m1_t src) {
+  return __riscv_vreinterpret_b16(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b16_u64m1
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv4i1(<vscale x 64 x i1> poison, <vscale x 4 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b16_u64m1(vbool16_t src) {
+  return __riscv_vreinterpret_u64m1(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i1> @test_vreinterpret_v_u64m1_b8
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast <vscale x 1 x i64> [[SRC]] to <vscale x 64 x i1>
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.vector.extract.nxv8i1.nxv64i1(<vscale x 64 x i1> [[TMP0]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vreinterpret_v_u64m1_b8(vuint64m1_t src) {
+  return __riscv_vreinterpret_b8(src);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vreinterpret_v_b8_u64m1
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[SRC:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i1> @llvm.vector.insert.nxv64i1.nxv8i1(<vscale x 64 x i1> poison, <vscale x 8 x i1> [[SRC]], i64 0)
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = bitcast <vscale x 64 x i1> [[TMP0]] to <vscale x 1 x i64>
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vreinterpret_v_b8_u64m1(vbool8_t src) {
+  return __riscv_vreinterpret_u64m1(src);
+}
+
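For context, here is a minimal usage sketch (not part of the commit) showing how the new conversions compose: it round-trips a vbool8_t mask through a vuint64m1_t value using the non-overloaded intrinsic names exercised by the tests above, and assumes <riscv_vector.h> plus a compiler that carries this change. The overloaded spellings (__riscv_vreinterpret_u64m1 / __riscv_vreinterpret_b8) shown in the overloaded test file would work the same way.

#include <riscv_vector.h>

/* Round-trip a b8 mask through an m1 integer vector so its raw bits can be
   stored or inspected, then restore the mask. Per the CHECK lines above, the
   first call lowers to llvm.vector.insert followed by a bitcast, and the
   second to a bitcast followed by llvm.vector.extract. */
vbool8_t roundtrip_mask_b8(vbool8_t mask) {
  vuint64m1_t raw = __riscv_vreinterpret_v_b8_u64m1(mask);
  return __riscv_vreinterpret_v_u64m1_b8(raw);
}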