[llvm] a45f20b - [RISCV][test] Add additional check-prefixes to fix conflicts in tests (#160689)
via llvm-commits
llvm-commits at lists.llvm.org
Fri Sep 26 11:32:39 PDT 2025
Author: Alex Bradbury
Date: 2025-09-26T19:32:35+01:00
New Revision: a45f20b5af4b23f55c3256829ec30f8f307c1d90
URL: https://github.com/llvm/llvm-project/commit/a45f20b5af4b23f55c3256829ec30f8f307c1d90
DIFF: https://github.com/llvm/llvm-project/commit/a45f20b5af4b23f55c3256829ec30f8f307c1d90.diff
LOG: [RISCV][test] Add additional check-prefixes to fix conflicts in tests (#160689)
These are all cases where check lines were being silently dropped prior
to #159321, which added proper warnings.
I ran `find llvm/test/CodeGen/RISCV -name "*.ll" -exec
./llvm/utils/update_llc_test_checks.py --llc-bin=./remote-llc -u {} \;`
and went through all the cases that emitted the new warning.
`idiv_large.ll` is a case that appears not to be generated by
update_llc_test_checks but still has the comment indicating it was
(presumably it was hand-edited after generation).
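For context, the first hunk below is representative of the conflict being
fixed: two RUN lines in GlobalISel/float-intrinsics.ll both used the RV64IF
prefix even though one enables +d, so any function whose +d output diverged
got no check lines at all. A condensed sketch of the before/after (the RUN
lines here are simplified for illustration; the real ones pipe the input
through sed and set -target-abi):
; Before: both configurations share one prefix, so functions whose
; output differs between them get no common check lines.
; RUN: llc -mtriple=riscv64 -global-isel -mattr=+f < %s | FileCheck -check-prefix=RV64IF %s
; RUN: llc -mtriple=riscv64 -global-isel -mattr=+d < %s | FileCheck -check-prefix=RV64IF %s
;
; After: a distinct RV64IFD prefix lets update_llc_test_checks.py emit
; checks for each configuration separately.
; RUN: llc -mtriple=riscv64 -global-isel -mattr=+f < %s | FileCheck -check-prefix=RV64IF %s
; RUN: llc -mtriple=riscv64 -global-isel -mattr=+d < %s | FileCheck -check-prefix=RV64IFD %s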
Added:
Modified:
llvm/test/CodeGen/RISCV/GlobalISel/float-intrinsics.ll
llvm/test/CodeGen/RISCV/cmov-branch-opt.ll
llvm/test/CodeGen/RISCV/double-select-fcmp.ll
llvm/test/CodeGen/RISCV/float-intrinsics.ll
llvm/test/CodeGen/RISCV/idiv_large.ll
llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-setcc.ll
llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-rotate.ll
llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vfw-web-simplification.ll
llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vwmulu.ll
llvm/test/CodeGen/RISCV/rvv/vmerge.ll
Removed:
################################################################################
diff --git a/llvm/test/CodeGen/RISCV/GlobalISel/float-intrinsics.ll b/llvm/test/CodeGen/RISCV/GlobalISel/float-intrinsics.ll
index 7f387a763b6da..23f660bb026a7 100644
--- a/llvm/test/CodeGen/RISCV/GlobalISel/float-intrinsics.ll
+++ b/llvm/test/CodeGen/RISCV/GlobalISel/float-intrinsics.ll
@@ -10,7 +10,7 @@
; RUN: | FileCheck -check-prefix=RV64IF %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -global-isel -mattr=+d \
; RUN: -target-abi=lp64d \
-; RUN: | FileCheck -check-prefix=RV64IF %s
+; RUN: | FileCheck -check-prefix=RV64IFD %s
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -global-isel \
; RUN: | FileCheck -check-prefix=RV32I %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -global-isel \
@@ -27,6 +27,11 @@ define float @sqrt_f32(float %a) nounwind {
; RV64IF-NEXT: fsqrt.s fa0, fa0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: sqrt_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fsqrt.s fa0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sqrt_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -68,6 +73,16 @@ define float @powi_f32(float %a, i32 %b) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: powi_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: sext.w a0, a0
+; RV64IFD-NEXT: call __powisf2
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: powi_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -109,6 +124,15 @@ define float @sin_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: sin_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call sinf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sin_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -149,6 +173,15 @@ define float @cos_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: cos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call cosf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: cos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -190,6 +223,42 @@ define float @sincos_f32(float %a) nounwind {
; RV32IF-NEXT: addi sp, sp, 16
; RV32IF-NEXT: ret
;
+; RV64IF-LABEL: sincos_f32:
+; RV64IF: # %bb.0:
+; RV64IF-NEXT: addi sp, sp, -16
+; RV64IF-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IF-NEXT: fsw fs0, 4(sp) # 4-byte Folded Spill
+; RV64IF-NEXT: fsw fs1, 0(sp) # 4-byte Folded Spill
+; RV64IF-NEXT: fmv.s fs0, fa0
+; RV64IF-NEXT: call sinf
+; RV64IF-NEXT: fmv.s fs1, fa0
+; RV64IF-NEXT: fmv.s fa0, fs0
+; RV64IF-NEXT: call cosf
+; RV64IF-NEXT: fadd.s fa0, fs1, fa0
+; RV64IF-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IF-NEXT: flw fs0, 4(sp) # 4-byte Folded Reload
+; RV64IF-NEXT: flw fs1, 0(sp) # 4-byte Folded Reload
+; RV64IF-NEXT: addi sp, sp, 16
+; RV64IF-NEXT: ret
+;
+; RV64IFD-LABEL: sincos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -32
+; RV64IFD-NEXT: sd ra, 24(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fsd fs0, 16(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fsd fs1, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fmv.s fs0, fa0
+; RV64IFD-NEXT: call sinf
+; RV64IFD-NEXT: fmv.s fs1, fa0
+; RV64IFD-NEXT: fmv.s fa0, fs0
+; RV64IFD-NEXT: call cosf
+; RV64IFD-NEXT: fadd.s fa0, fs1, fa0
+; RV64IFD-NEXT: ld ra, 24(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: fld fs0, 16(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: fld fs1, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 32
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sincos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -254,6 +323,15 @@ define float @pow_f32(float %a, float %b) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: pow_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call powf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: pow_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -294,6 +372,15 @@ define float @exp_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: exp_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call expf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: exp_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -334,6 +421,15 @@ define float @exp2_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: exp2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call exp2f
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: exp2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -374,6 +470,15 @@ define float @exp10_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: exp10_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call exp10f
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: exp10_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -414,6 +519,15 @@ define float @log_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: log_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call logf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: log_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -454,6 +568,15 @@ define float @log10_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: log10_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call log10f
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: log10_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -494,6 +617,15 @@ define float @log2_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: log2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call log2f
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: log2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -526,6 +658,11 @@ define float @fma_f32(float %a, float %b, float %c) nounwind {
; RV64IF-NEXT: fmadd.s fa0, fa0, fa1, fa2
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: fma_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmadd.s fa0, fa0, fa1, fa2
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fma_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -558,6 +695,11 @@ define float @fmuladd_f32(float %a, float %b, float %c) nounwind {
; RV64IF-NEXT: fmadd.s fa0, fa0, fa1, fa2
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: fmuladd_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmadd.s fa0, fa0, fa1, fa2
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fmuladd_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -600,6 +742,11 @@ define float @fabs_f32(float %a) nounwind {
; RV64IF-NEXT: fabs.s fa0, fa0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: fabs_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fabs.s fa0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fabs_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -626,6 +773,11 @@ define float @minnum_f32(float %a, float %b) nounwind {
; RV64IF-NEXT: fmin.s fa0, fa0, fa1
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: minnum_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmin.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: minnum_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -658,6 +810,11 @@ define float @maxnum_f32(float %a, float %b) nounwind {
; RV64IF-NEXT: fmax.s fa0, fa0, fa1
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: maxnum_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmax.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: maxnum_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -690,6 +847,11 @@ define float @copysign_f32(float %a, float %b) nounwind {
; RV64IF-NEXT: fsgnj.s fa0, fa0, fa1
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: copysign_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fsgnj.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: copysign_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a2, 524288
@@ -730,6 +892,15 @@ define float @ceil_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: ceil_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call ceilf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: ceil_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -770,6 +941,15 @@ define float @trunc_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: trunc_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call truncf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: trunc_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -810,6 +990,15 @@ define float @rint_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: rint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call rintf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: rint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -850,6 +1039,15 @@ define float @nearbyint_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: nearbyint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call nearbyintf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: nearbyint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -890,6 +1088,15 @@ define float @round_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: round_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call roundf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: round_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -930,6 +1137,15 @@ define float @roundeven_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: roundeven_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call roundevenf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: roundeven_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -966,6 +1182,13 @@ define i1 @fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 927
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1039,6 +1262,13 @@ define i1 @isnan_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 768
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1073,6 +1303,13 @@ define i1 @isqnan_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isqnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 512
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isqnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -1109,6 +1346,13 @@ define i1 @issnan_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: issnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 256
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: issnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1149,6 +1393,13 @@ define i1 @isinf_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isinf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 129
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isinf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1185,6 +1436,13 @@ define i1 @isposinf_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isposinf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 128
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isposinf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1218,6 +1476,13 @@ define i1 @isneginf_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isneginf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 1
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isneginf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 1046528
@@ -1251,6 +1516,13 @@ define i1 @isfinite_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 126
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1285,6 +1557,13 @@ define i1 @isposfinite_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isposfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 112
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isposfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1316,6 +1595,13 @@ define i1 @isnegfinite_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isnegfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 14
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnegfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1357,6 +1643,13 @@ define i1 @isnotfinite_fpclass(float %x) {
; RV64IF-NEXT: snez a0, a0
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: isnotfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 897
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnotfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1401,6 +1694,15 @@ define float @tan_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: tan_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call tanf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: tan_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1442,6 +1744,16 @@ define float @ldexp_float(float %x, i32 %y) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: ldexp_float:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: sext.w a0, a0
+; RV64IFD-NEXT: call ldexpf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: ldexp_float:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1483,6 +1795,15 @@ define float @asin_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: asin_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call asinf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: asin_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1523,6 +1844,15 @@ define float @acos_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: acos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call acosf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: acos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1563,6 +1893,15 @@ define float @atan_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: atan_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call atanf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: atan_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1603,6 +1942,15 @@ define float @atan2_f32(float %a, float %b) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: atan2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call atan2f
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: atan2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1643,6 +1991,15 @@ define float @sinh_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: sinh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call sinhf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sinh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1683,6 +2040,15 @@ define float @cosh_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: cosh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call coshf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: cosh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1723,6 +2089,15 @@ define float @tanh_f32(float %a) nounwind {
; RV64IF-NEXT: addi sp, sp, 16
; RV64IF-NEXT: ret
;
+; RV64IFD-LABEL: tanh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call tanhf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: tanh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
diff --git a/llvm/test/CodeGen/RISCV/cmov-branch-opt.ll b/llvm/test/CodeGen/RISCV/cmov-branch-opt.ll
index 6608874286e34..f8b1d505f4e81 100644
--- a/llvm/test/CodeGen/RISCV/cmov-branch-opt.ll
+++ b/llvm/test/CodeGen/RISCV/cmov-branch-opt.ll
@@ -6,9 +6,9 @@
; RUN: llc -mtriple=riscv64 -mattr=+conditional-cmv-fusion,+c,+zicond -verify-machineinstrs < %s \
; RUN: | FileCheck -check-prefixes=CMOV,CMOV-ZICOND %s
; RUN: llc -mtriple=riscv64 -mattr=+short-forward-branch-opt -verify-machineinstrs < %s \
-; RUN: | FileCheck -check-prefixes=SHORT_FORWARD,SFB-NOZICOND %s
+; RUN: | FileCheck -check-prefixes=SHORT_FORWARD,SFB-NOZICOND,SFB-NOZICOND-NOC %s
; RUN: llc -mtriple=riscv64 -mattr=+short-forward-branch-opt,+c -verify-machineinstrs < %s \
-; RUN: | FileCheck -check-prefixes=SHORT_FORWARD,SFB-NOZICOND %s
+; RUN: | FileCheck -check-prefixes=SHORT_FORWARD,SFB-NOZICOND,SFB-NOZICOND-C %s
; RUN: llc -mtriple=riscv64 -mattr=+short-forward-branch-opt,+zicond -verify-machineinstrs < %s \
; RUN: | FileCheck -check-prefixes=SHORT_FORWARD,SFB-ZICOND %s
@@ -263,6 +263,24 @@ define i32 @select_xor_2(i32 %A, i32 %B, i8 %cond) {
; CMOV-NEXT: .LBB6_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_xor_2:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB6_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: xor a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB6_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_xor_2:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB6_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: xor a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB6_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_xor_2:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
@@ -300,6 +318,24 @@ define i32 @select_xor_2b(i32 %A, i32 %B, i8 %cond) {
; CMOV-NEXT: .LBB7_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_xor_2b:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB7_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: xor a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB7_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_xor_2b:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB7_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: xor a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB7_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_xor_2b:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
@@ -335,6 +371,24 @@ define i32 @select_or(i32 %A, i32 %B, i8 %cond) {
; CMOV-NEXT: .LBB8_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_or:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB8_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: or a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB8_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_or:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB8_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: or a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB8_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_or:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
@@ -372,6 +426,24 @@ define i32 @select_or_b(i32 %A, i32 %B, i8 %cond) {
; CMOV-NEXT: .LBB9_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_or_b:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB9_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: or a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB9_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_or_b:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB9_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: or a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB9_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_or_b:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
@@ -407,6 +479,24 @@ define i32 @select_or_1(i32 %A, i32 %B, i32 %cond) {
; CMOV-NEXT: .LBB10_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_or_1:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB10_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: or a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB10_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_or_1:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB10_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: or a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB10_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_or_1:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
@@ -444,6 +534,24 @@ define i32 @select_or_1b(i32 %A, i32 %B, i32 %cond) {
; CMOV-NEXT: .LBB11_2: # %entry
; CMOV-NEXT: ret
;
+; SFB-NOZICOND-NOC-LABEL: select_or_1b:
+; SFB-NOZICOND-NOC: # %bb.0: # %entry
+; SFB-NOZICOND-NOC-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-NOC-NEXT: beqz a2, .LBB11_2
+; SFB-NOZICOND-NOC-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-NOC-NEXT: or a0, a1, a0
+; SFB-NOZICOND-NOC-NEXT: .LBB11_2: # %entry
+; SFB-NOZICOND-NOC-NEXT: ret
+;
+; SFB-NOZICOND-C-LABEL: select_or_1b:
+; SFB-NOZICOND-C: # %bb.0: # %entry
+; SFB-NOZICOND-C-NEXT: andi a2, a2, 1
+; SFB-NOZICOND-C-NEXT: beqz a2, .LBB11_2
+; SFB-NOZICOND-C-NEXT: # %bb.1: # %entry
+; SFB-NOZICOND-C-NEXT: or a0, a0, a1
+; SFB-NOZICOND-C-NEXT: .LBB11_2: # %entry
+; SFB-NOZICOND-C-NEXT: ret
+;
; SFB-ZICOND-LABEL: select_or_1b:
; SFB-ZICOND: # %bb.0: # %entry
; SFB-ZICOND-NEXT: andi a2, a2, 1
diff --git a/llvm/test/CodeGen/RISCV/double-select-fcmp.ll b/llvm/test/CodeGen/RISCV/double-select-fcmp.ll
index 1deea55b083ce..cd3ff779d8cd3 100644
--- a/llvm/test/CodeGen/RISCV/double-select-fcmp.ll
+++ b/llvm/test/CodeGen/RISCV/double-select-fcmp.ll
@@ -1,8 +1,8 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+d -verify-machineinstrs < %s \
-; RUN: -target-abi=ilp32d | FileCheck %s
+; RUN: -target-abi=ilp32d | FileCheck --check-prefixes=CHECK,RV32D %s
; RUN: llc -mtriple=riscv64 -mattr=+d -verify-machineinstrs < %s \
-; RUN: -target-abi=lp64d | FileCheck %s
+; RUN: -target-abi=lp64d | FileCheck --check-prefixes=CHECK,RV64D %s
; RUN: llc -mtriple=riscv32 -mattr=+zdinx -verify-machineinstrs < %s \
; RUN: -target-abi=ilp32 | FileCheck --check-prefix=CHECKRV32ZDINX %s
; RUN: llc -mtriple=riscv64 -mattr=+zdinx -verify-machineinstrs < %s \
@@ -640,6 +640,39 @@ define signext i32 @select_fcmp_uge_1_2(double %a, double %b) nounwind {
}
define double @CascadedSelect(double noundef %a) {
+; RV32D-LABEL: CascadedSelect:
+; RV32D: # %bb.0: # %entry
+; RV32D-NEXT: lui a0, %hi(.LCPI20_0)
+; RV32D-NEXT: fld fa5, %lo(.LCPI20_0)(a0)
+; RV32D-NEXT: flt.d a0, fa5, fa0
+; RV32D-NEXT: bnez a0, .LBB20_3
+; RV32D-NEXT: # %bb.1: # %entry
+; RV32D-NEXT: fcvt.d.w fa5, zero
+; RV32D-NEXT: flt.d a0, fa0, fa5
+; RV32D-NEXT: bnez a0, .LBB20_3
+; RV32D-NEXT: # %bb.2: # %entry
+; RV32D-NEXT: fmv.d fa5, fa0
+; RV32D-NEXT: .LBB20_3: # %entry
+; RV32D-NEXT: fmv.d fa0, fa5
+; RV32D-NEXT: ret
+;
+; RV64D-LABEL: CascadedSelect:
+; RV64D: # %bb.0: # %entry
+; RV64D-NEXT: li a0, 1023
+; RV64D-NEXT: slli a0, a0, 52
+; RV64D-NEXT: fmv.d.x fa5, a0
+; RV64D-NEXT: flt.d a0, fa5, fa0
+; RV64D-NEXT: bnez a0, .LBB20_3
+; RV64D-NEXT: # %bb.1: # %entry
+; RV64D-NEXT: fmv.d.x fa5, zero
+; RV64D-NEXT: flt.d a0, fa0, fa5
+; RV64D-NEXT: bnez a0, .LBB20_3
+; RV64D-NEXT: # %bb.2: # %entry
+; RV64D-NEXT: fmv.d fa5, fa0
+; RV64D-NEXT: .LBB20_3: # %entry
+; RV64D-NEXT: fmv.d fa0, fa5
+; RV64D-NEXT: ret
+;
; CHECKRV32ZDINX-LABEL: CascadedSelect:
; CHECKRV32ZDINX: # %bb.0: # %entry
; CHECKRV32ZDINX-NEXT: lui a3, %hi(.LCPI20_0)
diff --git a/llvm/test/CodeGen/RISCV/float-intrinsics.ll b/llvm/test/CodeGen/RISCV/float-intrinsics.ll
index 8b8a3257a0027..b1230ae9dd6bf 100644
--- a/llvm/test/CodeGen/RISCV/float-intrinsics.ll
+++ b/llvm/test/CodeGen/RISCV/float-intrinsics.ll
@@ -16,7 +16,7 @@
; RUN: | FileCheck -check-prefix=RV64IZFINX %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+d \
; RUN: -verify-machineinstrs -target-abi=lp64d \
-; RUN: | FileCheck -check-prefix=RV64IF %s
+; RUN: | FileCheck -check-prefixes=RV64IFD %s
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 \
; RUN: -verify-machineinstrs | FileCheck -check-prefix=RV32I %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 \
@@ -45,6 +45,11 @@ define float @sqrt_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fsqrt.s a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: sqrt_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fsqrt.s fa0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sqrt_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -97,6 +102,16 @@ define float @powi_f32(float %a, i32 %b) nounwind {
; RV64IZFINX-NEXT: addi sp, sp, 16
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: powi_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: sext.w a0, a0
+; RV64IFD-NEXT: call __powisf2
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: powi_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -138,6 +153,10 @@ define float @sin_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail sinf
;
+; RV64IFD-LABEL: sin_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail sinf
+;
; RV32I-LABEL: sin_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -178,6 +197,10 @@ define float @cos_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail cosf
;
+; RV64IFD-LABEL: cos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail cosf
+;
; RV32I-LABEL: cos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -237,6 +260,24 @@ define float @sincos_f32(float %a) nounwind {
; RV32IZFINX-NEXT: addi sp, sp, 16
; RV32IZFINX-NEXT: ret
;
+; RV64IF-LABEL: sincos_f32:
+; RV64IF: # %bb.0:
+; RV64IF-NEXT: addi sp, sp, -16
+; RV64IF-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IF-NEXT: fsw fs0, 4(sp) # 4-byte Folded Spill
+; RV64IF-NEXT: fsw fs1, 0(sp) # 4-byte Folded Spill
+; RV64IF-NEXT: fmv.s fs0, fa0
+; RV64IF-NEXT: call sinf
+; RV64IF-NEXT: fmv.s fs1, fa0
+; RV64IF-NEXT: fmv.s fa0, fs0
+; RV64IF-NEXT: call cosf
+; RV64IF-NEXT: fadd.s fa0, fs1, fa0
+; RV64IF-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IF-NEXT: flw fs0, 4(sp) # 4-byte Folded Reload
+; RV64IF-NEXT: flw fs1, 0(sp) # 4-byte Folded Reload
+; RV64IF-NEXT: addi sp, sp, 16
+; RV64IF-NEXT: ret
+;
; RV64IZFINX-LABEL: sincos_f32:
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: addi sp, sp, -32
@@ -255,6 +296,24 @@ define float @sincos_f32(float %a) nounwind {
; RV64IZFINX-NEXT: addi sp, sp, 32
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: sincos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -32
+; RV64IFD-NEXT: sd ra, 24(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fsd fs0, 16(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fsd fs1, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: fmv.s fs0, fa0
+; RV64IFD-NEXT: call sinf
+; RV64IFD-NEXT: fmv.s fs1, fa0
+; RV64IFD-NEXT: fmv.s fa0, fs0
+; RV64IFD-NEXT: call cosf
+; RV64IFD-NEXT: fadd.s fa0, fs1, fa0
+; RV64IFD-NEXT: ld ra, 24(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: fld fs0, 16(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: fld fs1, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 32
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: sincos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -319,6 +378,10 @@ define float @pow_f32(float %a, float %b) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail powf
;
+; RV64IFD-LABEL: pow_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail powf
+;
; RV32I-LABEL: pow_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -359,6 +422,10 @@ define float @exp_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail expf
;
+; RV64IFD-LABEL: exp_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail expf
+;
; RV32I-LABEL: exp_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -399,6 +466,10 @@ define float @exp2_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail exp2f
;
+; RV64IFD-LABEL: exp2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail exp2f
+;
; RV32I-LABEL: exp2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -437,6 +508,10 @@ define float @exp10_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail exp10f
;
+; RV64IFD-LABEL: exp10_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail exp10f
+;
; RV32I-LABEL: exp10_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -477,6 +552,10 @@ define float @log_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail logf
;
+; RV64IFD-LABEL: log_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail logf
+;
; RV32I-LABEL: log_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -517,6 +596,10 @@ define float @log10_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail log10f
;
+; RV64IFD-LABEL: log10_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail log10f
+;
; RV32I-LABEL: log10_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -557,6 +640,10 @@ define float @log2_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail log2f
;
+; RV64IFD-LABEL: log2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail log2f
+;
; RV32I-LABEL: log2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -601,6 +688,11 @@ define float @fma_f32(float %a, float %b, float %c) nounwind {
; RV64IZFINX-NEXT: fmadd.s a0, a0, a1, a2
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: fma_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmadd.s fa0, fa0, fa1, fa2
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fma_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -645,6 +737,11 @@ define float @fmuladd_f32(float %a, float %b, float %c) nounwind {
; RV64IZFINX-NEXT: fmadd.s a0, a0, a1, a2
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: fmuladd_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmadd.s fa0, fa0, fa1, fa2
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fmuladd_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -699,6 +796,11 @@ define float @fabs_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fabs.s a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: fabs_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fabs.s fa0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fabs_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -737,6 +839,11 @@ define float @minnum_f32(float %a, float %b) nounwind {
; RV64IZFINX-NEXT: fmin.s a0, a0, a1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: minnum_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmin.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: minnum_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -781,6 +888,11 @@ define float @maxnum_f32(float %a, float %b) nounwind {
; RV64IZFINX-NEXT: fmax.s a0, a0, a1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: maxnum_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmax.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: maxnum_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -825,6 +937,11 @@ define float @copysign_f32(float %a, float %b) nounwind {
; RV64IZFINX-NEXT: fsgnj.s a0, a0, a1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: copysign_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fsgnj.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: copysign_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a2, 524288
@@ -903,6 +1020,20 @@ define float @floor_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB18_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: floor_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB18_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rdn
+; RV64IFD-NEXT: fcvt.s.w fa5, a0, rdn
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB18_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: floor_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -981,6 +1112,20 @@ define float @ceil_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB19_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: ceil_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB19_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rup
+; RV64IFD-NEXT: fcvt.s.w fa5, a0, rup
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB19_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: ceil_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1059,6 +1204,20 @@ define float @trunc_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB20_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: trunc_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB20_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rtz
+; RV64IFD-NEXT: fcvt.s.w fa5, a0, rtz
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB20_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: trunc_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1137,6 +1296,20 @@ define float @rint_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB21_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: rint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB21_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0
+; RV64IFD-NEXT: fcvt.s.w fa5, a0
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB21_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: rint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1177,6 +1350,10 @@ define float @nearbyint_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail nearbyintf
;
+; RV64IFD-LABEL: nearbyint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail nearbyintf
+;
; RV32I-LABEL: nearbyint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1255,6 +1432,20 @@ define float @round_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB23_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: round_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB23_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rmm
+; RV64IFD-NEXT: fcvt.s.w fa5, a0, rmm
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB23_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: round_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1333,6 +1524,20 @@ define float @roundeven_f32(float %a) nounwind {
; RV64IZFINX-NEXT: .LBB24_2:
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: roundeven_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: lui a0, 307200
+; RV64IFD-NEXT: fmv.w.x fa5, a0
+; RV64IFD-NEXT: fabs.s fa4, fa0
+; RV64IFD-NEXT: flt.s a0, fa4, fa5
+; RV64IFD-NEXT: beqz a0, .LBB24_2
+; RV64IFD-NEXT: # %bb.1:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rne
+; RV64IFD-NEXT: fcvt.s.w fa5, a0, rne
+; RV64IFD-NEXT: fsgnj.s fa0, fa5, fa0
+; RV64IFD-NEXT: .LBB24_2:
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: roundeven_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1377,6 +1582,11 @@ define iXLen @lrint_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fcvt.l.s a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: lrint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fcvt.l.s a0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: lrint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1422,6 +1632,11 @@ define iXLen @lround_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fcvt.l.s a0, a0, rmm
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: lround_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fcvt.l.s a0, fa0, rmm
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: lround_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1466,6 +1681,11 @@ define i32 @lround_i32_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fcvt.w.s a0, a0, rmm
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: lround_i32_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fcvt.w.s a0, fa0, rmm
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: lround_i32_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1518,6 +1738,11 @@ define i64 @llrint_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fcvt.l.s a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: llrint_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fcvt.l.s a0, fa0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: llrint_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1570,6 +1795,11 @@ define i64 @llround_f32(float %a) nounwind {
; RV64IZFINX-NEXT: fcvt.l.s a0, a0, rmm
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: llround_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fcvt.l.s a0, fa0, rmm
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: llround_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -1621,6 +1851,13 @@ define i1 @fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 927
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a0, 1
@@ -1705,6 +1942,13 @@ define i1 @isnan_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 768
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -1749,6 +1993,12 @@ define i1 @isqnan_fpclass(float %x) {
; RV64IZFINX-NEXT: srli a0, a0, 9
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isqnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: srli a0, a0, 9
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isqnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -1799,6 +2049,13 @@ define i1 @issnan_fpclass(float %x) {
; RV64IZFINX-NEXT: srli a0, a0, 63
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: issnan_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: slli a0, a0, 55
+; RV64IFD-NEXT: srli a0, a0, 63
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: issnan_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -1853,6 +2110,13 @@ define i1 @isinf_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isinf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 129
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isinf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -1903,6 +2167,13 @@ define i1 @isposinf_fpclass(float %x) {
; RV64IZFINX-NEXT: srli a0, a0, 63
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isposinf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: slli a0, a0, 56
+; RV64IFD-NEXT: srli a0, a0, 63
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isposinf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 522240
@@ -1946,6 +2217,12 @@ define i1 @isneginf_fpclass(float %x) {
; RV64IZFINX-NEXT: andi a0, a0, 1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isneginf_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isneginf_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: lui a1, 1046528
@@ -1993,6 +2270,13 @@ define i1 @isfinite_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 126
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -2041,6 +2325,13 @@ define i1 @isposfinite_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isposfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 112
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isposfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a0, a0, 23
@@ -2085,6 +2376,13 @@ define i1 @isnegfinite_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isnegfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 14
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnegfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a0, 1
@@ -2137,6 +2435,13 @@ define i1 @isnotfinite_fpclass(float %x) {
; RV64IZFINX-NEXT: snez a0, a0
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: isnotfinite_fpclass:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fclass.s a0, fa0
+; RV64IFD-NEXT: andi a0, a0, 897
+; RV64IFD-NEXT: snez a0, a0
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: isnotfinite_fpclass:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a0, a0, 1
@@ -2175,6 +2480,10 @@ define float @tan_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail tanf
;
+; RV64IFD-LABEL: tan_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail tanf
+;
; RV32I-LABEL: tan_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2219,6 +2528,11 @@ define float @maximumnum_float(float %x, float %y) {
; RV64IZFINX-NEXT: fmax.s a0, a0, a1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: maximumnum_float:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmax.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: maximumnum_float:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2271,6 +2585,11 @@ define float @minimumnum_float(float %x, float %y) {
; RV64IZFINX-NEXT: fmin.s a0, a0, a1
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: minimumnum_float:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: fmin.s fa0, fa0, fa1
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: minimumnum_float:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2327,6 +2646,15 @@ define float @ldexp_float(float %x, i32 signext %y) nounwind {
; RV64IZFINX-NEXT: addi sp, sp, 16
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: ldexp_float:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: call ldexpf
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: ldexp_float:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2393,6 +2721,17 @@ define {float, i32} @frexp_float(float %x) nounwind {
; RV64IZFINX-NEXT: addi sp, sp, 16
; RV64IZFINX-NEXT: ret
;
+; RV64IFD-LABEL: frexp_float:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: addi sp, sp, -16
+; RV64IFD-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
+; RV64IFD-NEXT: mv a0, sp
+; RV64IFD-NEXT: call frexpf
+; RV64IFD-NEXT: ld a0, 0(sp)
+; RV64IFD-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
+; RV64IFD-NEXT: addi sp, sp, 16
+; RV64IFD-NEXT: ret
+;
; RV32I-LABEL: frexp_float:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2435,6 +2774,10 @@ define float @asin_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail asinf
;
+; RV64IFD-LABEL: asin_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail asinf
+;
; RV32I-LABEL: asin_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2473,6 +2816,10 @@ define float @acos_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail acosf
;
+; RV64IFD-LABEL: acos_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail acosf
+;
; RV32I-LABEL: acos_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2511,6 +2858,10 @@ define float @atan_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail atanf
;
+; RV64IFD-LABEL: atan_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail atanf
+;
; RV32I-LABEL: atan_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2549,6 +2900,10 @@ define float @atan2_f32(float %a, float %b) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail atan2f
;
+; RV64IFD-LABEL: atan2_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail atan2f
+;
; RV32I-LABEL: atan2_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2587,6 +2942,10 @@ define float @sinh_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail sinhf
;
+; RV64IFD-LABEL: sinh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail sinhf
+;
; RV32I-LABEL: sinh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2625,6 +2984,10 @@ define float @cosh_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail coshf
;
+; RV64IFD-LABEL: cosh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail coshf
+;
; RV32I-LABEL: cosh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
@@ -2663,6 +3026,10 @@ define float @tanh_f32(float %a) nounwind {
; RV64IZFINX: # %bb.0:
; RV64IZFINX-NEXT: tail tanhf
;
+; RV64IFD-LABEL: tanh_f32:
+; RV64IFD: # %bb.0:
+; RV64IFD-NEXT: tail tanhf
+;
; RV32I-LABEL: tanh_f32:
; RV32I: # %bb.0:
; RV32I-NEXT: addi sp, sp, -16
diff --git a/llvm/test/CodeGen/RISCV/idiv_large.ll b/llvm/test/CodeGen/RISCV/idiv_large.ll
index fb7e4a4d103d0..9937627962208 100644
--- a/llvm/test/CodeGen/RISCV/idiv_large.ll
+++ b/llvm/test/CodeGen/RISCV/idiv_large.ll
@@ -1,4 +1,3 @@
-; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 < %s | FileCheck %s
; RUN: llc -mtriple=riscv64 < %s | FileCheck %s
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-setcc.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-setcc.ll
index abb929eaaf6e6..e256ba9dd5997 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-setcc.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-fp-setcc.ll
@@ -1,8 +1,8 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -target-abi=ilp32d -mattr=+v,+zfh,+zvfh,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFH
; RUN: llc -mtriple=riscv64 -target-abi=lp64d -mattr=+v,+zfh,+zvfh,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFH
-; RUN: llc -mtriple=riscv32 -target-abi=ilp32d -mattr=+v,+zfh,+zvfhmin,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFHMIN
-; RUN: llc -mtriple=riscv64 -target-abi=lp64d -mattr=+v,+zfh,+zvfhmin,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFHMIN
+; RUN: llc -mtriple=riscv32 -target-abi=ilp32d -mattr=+v,+zfh,+zvfhmin,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFHMIN,RV32ZVFHMIN
+; RUN: llc -mtriple=riscv64 -target-abi=lp64d -mattr=+v,+zfh,+zvfhmin,+f,+d -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,ZVFHMIN,RV64ZVFHMIN
define void @fcmp_oeq_vv_v8f16(ptr %x, ptr %y, ptr %z) {
; ZVFH-LABEL: fcmp_oeq_vv_v8f16:
@@ -437,6 +437,1036 @@ define void @fcmp_ugt_vv_v64f16(ptr %x, ptr %y, ptr %z) {
; ZVFH-NEXT: vmnot.m v8, v24
; ZVFH-NEXT: vsm.v v8, (a2)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_vv_v64f16:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 512
+; RV32ZVFHMIN-NEXT: sw ra, 508(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 504(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a3, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a3, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v16, (a1)
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 128
+; RV32ZVFHMIN-NEXT: addi a1, sp, 256
+; RV32ZVFHMIN-NEXT: vse16.v v16, (a0)
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a1)
+; RV32ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 320(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 318(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 316(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 314(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 312(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 310(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 308(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 306(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 176(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 304(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a0, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a1, a1, 1
+; RV32ZVFHMIN-NEXT: sb a1, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 174(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 302(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a1, v8
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v16, 7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 172(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 300(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 7
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v16, 6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 170(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 298(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 6
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v16, 5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 168(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 296(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v19, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v16, 4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 166(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 294(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v21, v8, 4
+; RV32ZVFHMIN-NEXT: vslidedown.vi v23, v16, 3
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 164(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 292(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 3
+; RV32ZVFHMIN-NEXT: vslidedown.vi v25, v16, 2
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 162(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 290(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v24, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v26, v16, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: xori a0, a4, 1
+; RV32ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 288(sp)
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a4, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 354(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v27, v8, 1
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v16, 15
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 224(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 352(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a1, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a0, v13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 222(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 350(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 15
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 220(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 348(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v16, 14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 110(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 218(sp)
+; RV32ZVFHMIN-NEXT: lh t1, 346(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v19
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori t0, t0, 1
+; RV32ZVFHMIN-NEXT: sb t0, 109(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 216(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v16, 13
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v20
+; RV32ZVFHMIN-NEXT: lh t2, 344(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v21
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v16, 12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV32ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: xori a1, t2, 1
+; RV32ZVFHMIN-NEXT: sb a1, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 214(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a0
+; RV32ZVFHMIN-NEXT: lh t3, 342(sp)
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v23
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: xori a1, a1, 1
+; RV32ZVFHMIN-NEXT: sb a1, 107(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 212(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh a7, 340(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s t3, v22
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v16, 11
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: sb a5, 106(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 210(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: lh a6, 338(sp)
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v25
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: sb a5, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 208(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh t0, 336(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV32ZVFHMIN-NEXT: vslidedown.vi v24, v16, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: xori t0, t0, 1
+; RV32ZVFHMIN-NEXT: sb t0, 104(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 206(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: lh t1, 334(sp)
+; RV32ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v26
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: xori a7, t1, 1
+; RV32ZVFHMIN-NEXT: sb a7, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 204(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a6, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh t1, 332(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v27
+; RV32ZVFHMIN-NEXT: vslidedown.vi v26, v16, 9
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: xori a7, t1, 1
+; RV32ZVFHMIN-NEXT: sb a7, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 202(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 330(sp)
+; RV32ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: sb a7, 101(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 200(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 328(sp)
+; RV32ZVFHMIN-NEXT: xori a1, a1, 1
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: sb a7, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 198(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 326(sp)
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: xori t2, t2, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: sb a7, 99(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 196(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 324(sp)
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: xori t1, t1, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: sb a7, 98(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 194(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 322(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 66(sp)
+; RV32ZVFHMIN-NEXT: sb t2, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: sb a4, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a1, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 382(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v16, 8
+; RV32ZVFHMIN-NEXT: vslidedown.vi v2, v8, 14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 380(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v0, v8, 13
+; RV32ZVFHMIN-NEXT: vslidedown.vi v4, v8, 12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 378(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v6, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v30, v8, 10
+; RV32ZVFHMIN-NEXT: vslidedown.vi v28, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 376(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 374(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v2
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v18
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v0
+; RV32ZVFHMIN-NEXT: lh a1, 372(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v20
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: vmv.x.s t3, v4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 242(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: lh a4, 370(sp)
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: xori a1, a1, 1
+; RV32ZVFHMIN-NEXT: sb a1, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 240(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: lh a6, 368(sp)
+; RV32ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: sb a4, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 238(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: lh t0, 366(sp)
+; RV32ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: xori t0, t0, 1
+; RV32ZVFHMIN-NEXT: sb t0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 236(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: lh t1, 364(sp)
+; RV32ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v30
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: xori a5, t1, 1
+; RV32ZVFHMIN-NEXT: sb a5, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 234(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: lh a7, 362(sp)
+; RV32ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v26
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: xori a6, a7, 1
+; RV32ZVFHMIN-NEXT: sb a6, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 232(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: lh a7, 360(sp)
+; RV32ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v28
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: xori a5, a7, 1
+; RV32ZVFHMIN-NEXT: sb a5, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 230(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: lh a6, 358(sp)
+; RV32ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: fle.h a6, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: xori a1, a1, 1
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: xori a5, t2, 1
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: sb a6, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 228(sp)
+; RV32ZVFHMIN-NEXT: lh t2, 356(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a1, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 79(sp)
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a1, t1, 1
+; RV32ZVFHMIN-NEXT: xori a4, t0, 1
+; RV32ZVFHMIN-NEXT: xori a5, a7, 1
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 72(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 73(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 74(sp)
+; RV32ZVFHMIN-NEXT: sb a1, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV32ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a3, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a2)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 512
+; RV32ZVFHMIN-NEXT: lw ra, 508(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 504(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_vv_v64f16:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 512
+; RV64ZVFHMIN-NEXT: sd ra, 504(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 496(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a3, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a3, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v16, (a1)
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 128
+; RV64ZVFHMIN-NEXT: addi a1, sp, 256
+; RV64ZVFHMIN-NEXT: vse16.v v16, (a0)
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a1)
+; RV64ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 320(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 318(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 316(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 314(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 312(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 310(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 308(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 306(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 176(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 304(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a0, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a1, a1, 1
+; RV64ZVFHMIN-NEXT: sb a1, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 174(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 302(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a1, v8
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v16, 7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 172(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 300(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 7
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v16, 6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 170(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 298(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 6
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v16, 5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 168(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 296(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v19, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v16, 4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 166(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 294(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v21, v8, 4
+; RV64ZVFHMIN-NEXT: vslidedown.vi v23, v16, 3
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 164(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 292(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 3
+; RV64ZVFHMIN-NEXT: vslidedown.vi v25, v16, 2
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 162(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 290(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v24, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v26, v16, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: xori a0, a4, 1
+; RV64ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 288(sp)
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a4, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 354(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v27, v8, 1
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v16, 15
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 224(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 352(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a1, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a0, v13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 222(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 350(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 15
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 220(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 348(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v16, 14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 110(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 218(sp)
+; RV64ZVFHMIN-NEXT: lh t1, 346(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v19
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori t0, t0, 1
+; RV64ZVFHMIN-NEXT: sb t0, 109(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 216(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v16, 13
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v20
+; RV64ZVFHMIN-NEXT: lh t2, 344(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v21
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v16, 12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV64ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: xori a1, t2, 1
+; RV64ZVFHMIN-NEXT: sb a1, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 214(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a0
+; RV64ZVFHMIN-NEXT: lh t3, 342(sp)
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v23
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: xori a1, a1, 1
+; RV64ZVFHMIN-NEXT: sb a1, 107(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 212(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh a7, 340(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s t3, v22
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v16, 11
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: sb a5, 106(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 210(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: lh a6, 338(sp)
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v25
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: sb a5, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 208(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh t0, 336(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV64ZVFHMIN-NEXT: vslidedown.vi v24, v16, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: xori t0, t0, 1
+; RV64ZVFHMIN-NEXT: sb t0, 104(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 206(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: lh t1, 334(sp)
+; RV64ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v26
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: xori a7, t1, 1
+; RV64ZVFHMIN-NEXT: sb a7, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 204(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a6, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh t1, 332(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v27
+; RV64ZVFHMIN-NEXT: vslidedown.vi v26, v16, 9
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: xori a7, t1, 1
+; RV64ZVFHMIN-NEXT: sb a7, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 202(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 330(sp)
+; RV64ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: sb a7, 101(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 200(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 328(sp)
+; RV64ZVFHMIN-NEXT: xori a1, a1, 1
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: sb a7, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 198(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 326(sp)
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: xori t2, t2, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: sb a7, 99(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 196(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 324(sp)
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: xori t1, t1, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: sb a7, 98(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 194(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 322(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 66(sp)
+; RV64ZVFHMIN-NEXT: sb t2, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: sb a4, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a1, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 382(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v16, 8
+; RV64ZVFHMIN-NEXT: vslidedown.vi v2, v8, 14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 380(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v0, v8, 13
+; RV64ZVFHMIN-NEXT: vslidedown.vi v4, v8, 12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 378(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v6, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v30, v8, 10
+; RV64ZVFHMIN-NEXT: vslidedown.vi v28, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 376(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 374(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v2
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v18
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v0
+; RV64ZVFHMIN-NEXT: lh a1, 372(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v20
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: vmv.x.s t3, v4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 242(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: lh a4, 370(sp)
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: xori a1, a1, 1
+; RV64ZVFHMIN-NEXT: sb a1, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 240(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: lh a6, 368(sp)
+; RV64ZVFHMIN-NEXT: fle.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: sb a4, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 238(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: lh t0, 366(sp)
+; RV64ZVFHMIN-NEXT: fle.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: xori t0, t0, 1
+; RV64ZVFHMIN-NEXT: sb t0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 236(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: lh t1, 364(sp)
+; RV64ZVFHMIN-NEXT: fle.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v30
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: xori a5, t1, 1
+; RV64ZVFHMIN-NEXT: sb a5, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 234(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: lh a7, 362(sp)
+; RV64ZVFHMIN-NEXT: fle.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v26
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: xori a6, a7, 1
+; RV64ZVFHMIN-NEXT: sb a6, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 232(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: lh a7, 360(sp)
+; RV64ZVFHMIN-NEXT: fle.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v28
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: xori a5, a7, 1
+; RV64ZVFHMIN-NEXT: sb a5, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 230(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: lh a6, 358(sp)
+; RV64ZVFHMIN-NEXT: fle.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: fle.h a6, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: xori a1, a1, 1
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: xori a5, t2, 1
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: sb a6, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 228(sp)
+; RV64ZVFHMIN-NEXT: lh t2, 356(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a1, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 79(sp)
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a1, t1, 1
+; RV64ZVFHMIN-NEXT: xori a4, t0, 1
+; RV64ZVFHMIN-NEXT: xori a5, a7, 1
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 72(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 73(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 74(sp)
+; RV64ZVFHMIN-NEXT: sb a1, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV64ZVFHMIN-NEXT: fle.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a3, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a2)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 512
+; RV64ZVFHMIN-NEXT: ld ra, 504(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 496(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = load <64 x half>, ptr %y
%c = fcmp ugt <64 x half> %a, %b
@@ -454,6 +1484,908 @@ define void @fcmp_ugt_vv_v64f16_nonans(ptr %x, ptr %y, ptr %z) {
; ZVFH-NEXT: vmflt.vv v24, v16, v8
; ZVFH-NEXT: vsm.v v24, (a2)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_vv_v64f16_nonans:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 512
+; RV32ZVFHMIN-NEXT: sw ra, 508(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 504(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a3, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a3, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v16, (a0)
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a1)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 256
+; RV32ZVFHMIN-NEXT: addi a1, sp, 128
+; RV32ZVFHMIN-NEXT: vse16.v v16, (a0)
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a1)
+; RV32ZVFHMIN-NEXT: lh a0, 320(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 192(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 318(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 190(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 316(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 188(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 314(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 186(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 312(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 184(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 310(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 182(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 308(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 180(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 306(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 178(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 304(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 176(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 302(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 174(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 300(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 172(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 298(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 170(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a0, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a1, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 296(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 168(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a1, v8
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v16, 7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 294(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 166(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 7
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v16, 6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 292(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 164(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 6
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v16, 5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 290(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 162(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v19, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v16, 4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: sb a4, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 288(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 160(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a1, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 354(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 226(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v21, v8, 4
+; RV32ZVFHMIN-NEXT: vslidedown.vi v23, v16, 3
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 352(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 224(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 3
+; RV32ZVFHMIN-NEXT: vslidedown.vi v25, v16, 2
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 350(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 222(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v24, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v27, v16, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 348(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 220(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v26, v8, 1
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v16, 15
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 346(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 218(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a1, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a0, v13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 109(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 344(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 216(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 15
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 342(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 214(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v16, 14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 107(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 340(sp)
+; RV32ZVFHMIN-NEXT: lh t1, 212(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v19
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb t0, 106(sp)
+; RV32ZVFHMIN-NEXT: lh t1, 338(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v16, 13
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV32ZVFHMIN-NEXT: lh t2, 210(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v21
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v16, 12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV32ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: sb t2, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 336(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a0
+; RV32ZVFHMIN-NEXT: lh t3, 208(sp)
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v23
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: sb a1, 104(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 334(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh a7, 206(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s t3, v22
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v16, 11
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a5, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 332(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: lh a6, 204(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v25
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a5, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 330(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh t0, 202(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV32ZVFHMIN-NEXT: vslidedown.vi v24, v16, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: sb t0, 101(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 328(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: lh t1, 200(sp)
+; RV32ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v27
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: sb t1, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a7, 326(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a6, fa4, fa5
+; RV32ZVFHMIN-NEXT: lh t1, 198(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v26
+; RV32ZVFHMIN-NEXT: vslidedown.vi v26, v16, 9
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb t1, 99(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 324(sp)
+; RV32ZVFHMIN-NEXT: lh t1, 196(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb t0, 98(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 322(sp)
+; RV32ZVFHMIN-NEXT: lh t1, 194(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 65(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 66(sp)
+; RV32ZVFHMIN-NEXT: sb t2, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a4, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a1, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 382(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 254(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v16, 8
+; RV32ZVFHMIN-NEXT: vslidedown.vi v2, v8, 14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 380(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 252(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v0, v8, 13
+; RV32ZVFHMIN-NEXT: vslidedown.vi v4, v8, 12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 378(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 250(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v6, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v30, v8, 10
+; RV32ZVFHMIN-NEXT: vslidedown.vi v28, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 376(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 248(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v14
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 374(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 246(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v2
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v18
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 372(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v0
+; RV32ZVFHMIN-NEXT: lh a1, 244(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v20
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: vmv.x.s t3, v4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a1, 370(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: lh a4, 242(sp)
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a1, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a4, 368(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: lh a6, 240(sp)
+; RV32ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a4, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 366(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: lh t0, 238(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: sb t0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh t0, 364(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV32ZVFHMIN-NEXT: lh t1, 236(sp)
+; RV32ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v30
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: sb t1, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 362(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: lh a7, 234(sp)
+; RV32ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v26
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a7, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 360(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV32ZVFHMIN-NEXT: lh a7, 232(sp)
+; RV32ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v28
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: sb a7, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 358(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: lh a6, 230(sp)
+; RV32ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a6, fa4, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV32ZVFHMIN-NEXT: sb a6, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a5, 356(sp)
+; RV32ZVFHMIN-NEXT: lh a6, 228(sp)
+; RV32ZVFHMIN-NEXT: sb t2, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a1, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 79(sp)
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 72(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 73(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV32ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a3, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a2)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 512
+; RV32ZVFHMIN-NEXT: lw ra, 508(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 504(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 512
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_vv_v64f16_nonans:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 512
+; RV64ZVFHMIN-NEXT: sd ra, 504(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 496(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a3, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a3, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v16, (a0)
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a1)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 256
+; RV64ZVFHMIN-NEXT: addi a1, sp, 128
+; RV64ZVFHMIN-NEXT: vse16.v v16, (a0)
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a1)
+; RV64ZVFHMIN-NEXT: lh a0, 320(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 192(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 318(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 190(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 316(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 188(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 314(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 186(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 312(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 184(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 310(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 182(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 308(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 180(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 306(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 178(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 304(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 176(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 302(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 174(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 300(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 172(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 298(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 170(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a0, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a1, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 296(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 168(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a1, v8
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v16, 7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 294(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 166(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 7
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v16, 6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 292(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 164(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 6
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v16, 5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 290(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 162(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v19, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v16, 4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: sb a4, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 288(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 160(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a1, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 354(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 226(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v21, v8, 4
+; RV64ZVFHMIN-NEXT: vslidedown.vi v23, v16, 3
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 352(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 224(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 3
+; RV64ZVFHMIN-NEXT: vslidedown.vi v25, v16, 2
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 350(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 222(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v24, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v27, v16, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 348(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 220(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v26, v8, 1
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v16, 15
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 346(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 218(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a1, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a0, v13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 109(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 344(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 216(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 15
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 342(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 214(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v16, 14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 107(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 340(sp)
+; RV64ZVFHMIN-NEXT: lh t1, 212(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v19
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb t0, 106(sp)
+; RV64ZVFHMIN-NEXT: lh t1, 338(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v16, 13
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV64ZVFHMIN-NEXT: lh t2, 210(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v21
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v16, 12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t2
+; RV64ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: sb t2, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 336(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a0
+; RV64ZVFHMIN-NEXT: lh t3, 208(sp)
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v23
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: sb a1, 104(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 334(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh a7, 206(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s t3, v22
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v16, 11
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a5, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 332(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: lh a6, 204(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v25
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a5, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 330(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh t0, 202(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV64ZVFHMIN-NEXT: vslidedown.vi v24, v16, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: sb t0, 101(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 328(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: lh t1, 200(sp)
+; RV64ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v27
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: sb t1, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a7, 326(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a6, fa4, fa5
+; RV64ZVFHMIN-NEXT: lh t1, 198(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v26
+; RV64ZVFHMIN-NEXT: vslidedown.vi v26, v16, 9
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb t1, 99(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 324(sp)
+; RV64ZVFHMIN-NEXT: lh t1, 196(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb t0, 98(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 322(sp)
+; RV64ZVFHMIN-NEXT: lh t1, 194(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 65(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 66(sp)
+; RV64ZVFHMIN-NEXT: sb t2, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h a5, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a4, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a1, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 382(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 254(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v16, 8
+; RV64ZVFHMIN-NEXT: vslidedown.vi v2, v8, 14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 380(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 252(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v0, v8, 13
+; RV64ZVFHMIN-NEXT: vslidedown.vi v4, v8, 12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 378(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 250(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v6, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v30, v8, 10
+; RV64ZVFHMIN-NEXT: vslidedown.vi v28, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 376(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 248(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v14
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 374(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 246(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v2
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v18
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 372(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v0
+; RV64ZVFHMIN-NEXT: lh a1, 244(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v20
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: vmv.x.s t3, v4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a1
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a1, 370(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: lh a4, 242(sp)
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a1
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a4
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a1, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a4, 368(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: lh a6, 240(sp)
+; RV64ZVFHMIN-NEXT: flt.h a1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a4, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 366(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: lh t0, 238(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v24
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: sb t0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh t0, 364(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t3
+; RV64ZVFHMIN-NEXT: lh t1, 236(sp)
+; RV64ZVFHMIN-NEXT: flt.h t2, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v30
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: sb t1, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 362(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: lh a7, 234(sp)
+; RV64ZVFHMIN-NEXT: flt.h t1, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v26
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a7, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 360(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, t0
+; RV64ZVFHMIN-NEXT: lh a7, 232(sp)
+; RV64ZVFHMIN-NEXT: flt.h t0, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v28
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: sb a7, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 358(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: lh a6, 230(sp)
+; RV64ZVFHMIN-NEXT: flt.h a7, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a6, fa4, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a5
+; RV64ZVFHMIN-NEXT: sb a6, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a5, 356(sp)
+; RV64ZVFHMIN-NEXT: lh a6, 228(sp)
+; RV64ZVFHMIN-NEXT: sb t2, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a1, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 79(sp)
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 72(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 73(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa4, a6
+; RV64ZVFHMIN-NEXT: flt.h a0, fa4, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a3, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a2)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 512
+; RV64ZVFHMIN-NEXT: ld ra, 504(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 496(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 512
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = load <64 x half>, ptr %y
%c = fcmp nnan ugt <64 x half> %a, %b
@@ -1069,6 +3001,744 @@ define void @fcmp_ugt_vf_v64f16(ptr %x, half %y, ptr %z) {
; ZVFH-NEXT: vmnot.m v8, v16
; ZVFH-NEXT: vsm.v v8, (a1)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_vf_v64f16:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV32ZVFHMIN-NEXT: sw ra, 380(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 376(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a2, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 128
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV32ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV32ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fle.h a4, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV32ZVFHMIN-NEXT: fle.h a5, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: fle.h a6, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV32ZVFHMIN-NEXT: fle.h a7, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fle.h t0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: xori t0, t0, 1
+; RV32ZVFHMIN-NEXT: xori t1, t1, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV32ZVFHMIN-NEXT: fle.h a4, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: fle.h a5, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV32ZVFHMIN-NEXT: fle.h a6, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fle.h a7, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV32ZVFHMIN-NEXT: fle.h t0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV32ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a4, a7, 1
+; RV32ZVFHMIN-NEXT: xori a5, t0, 1
+; RV32ZVFHMIN-NEXT: xori a6, t1, 1
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 73(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 74(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV32ZVFHMIN-NEXT: lw ra, 380(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 376(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_vf_v64f16:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV64ZVFHMIN-NEXT: sd ra, 376(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 368(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a2, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 128
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV64ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV64ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fle.h a4, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV64ZVFHMIN-NEXT: fle.h a5, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: fle.h a6, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV64ZVFHMIN-NEXT: fle.h a7, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fle.h t0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: xori t0, t0, 1
+; RV64ZVFHMIN-NEXT: xori t1, t1, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV64ZVFHMIN-NEXT: fle.h a4, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: fle.h a5, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV64ZVFHMIN-NEXT: fle.h a6, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fle.h a7, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV64ZVFHMIN-NEXT: fle.h t0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV64ZVFHMIN-NEXT: fle.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a4, a7, 1
+; RV64ZVFHMIN-NEXT: xori a5, t0, 1
+; RV64ZVFHMIN-NEXT: xori a6, t1, 1
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 73(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 74(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV64ZVFHMIN-NEXT: ld ra, 376(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 368(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = insertelement <64 x half> poison, half %y, i32 0
%c = shufflevector <64 x half> %b, <64 x half> poison, <64 x i32> zeroinitializer
@@ -1086,6 +3756,616 @@ define void @fcmp_ugt_vf_v64f16_nonans(ptr %x, half %y, ptr %z) {
; ZVFH-NEXT: vmfgt.vf v16, v8, fa0
; ZVFH-NEXT: vsm.v v16, (a1)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_vf_v64f16_nonans:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV32ZVFHMIN-NEXT: sw ra, 380(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 376(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a2, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 128
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: flt.h a5, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV32ZVFHMIN-NEXT: flt.h a6, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV32ZVFHMIN-NEXT: flt.h t0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: flt.h a5, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV32ZVFHMIN-NEXT: flt.h a6, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV32ZVFHMIN-NEXT: flt.h t0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV32ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 73(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV32ZVFHMIN-NEXT: lw ra, 380(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 376(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_vf_v64f16_nonans:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV64ZVFHMIN-NEXT: sd ra, 376(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 368(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a2, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 128
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: flt.h a5, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV64ZVFHMIN-NEXT: flt.h a6, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV64ZVFHMIN-NEXT: flt.h t0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: flt.h a5, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV64ZVFHMIN-NEXT: flt.h a6, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV64ZVFHMIN-NEXT: flt.h t0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV64ZVFHMIN-NEXT: flt.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 73(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV64ZVFHMIN-NEXT: ld ra, 376(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 368(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = insertelement <64 x half> poison, half %y, i32 0
%c = shufflevector <64 x half> %b, <64 x half> poison, <64 x i32> zeroinitializer
@@ -1710,6 +4990,744 @@ define void @fcmp_ugt_fv_v64f16(ptr %x, half %y, ptr %z) {
; ZVFH-NEXT: vmnot.m v8, v16
; ZVFH-NEXT: vsm.v v8, (a1)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_fv_v64f16:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV32ZVFHMIN-NEXT: sw ra, 380(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 376(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a2, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 128
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV32ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV32ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: fle.h a4, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV32ZVFHMIN-NEXT: fle.h a5, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: fle.h a6, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV32ZVFHMIN-NEXT: fle.h a7, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: fle.h t0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV32ZVFHMIN-NEXT: xori a7, a7, 1
+; RV32ZVFHMIN-NEXT: xori t0, t0, 1
+; RV32ZVFHMIN-NEXT: xori t1, t1, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV32ZVFHMIN-NEXT: fle.h a4, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: fle.h a5, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV32ZVFHMIN-NEXT: fle.h a6, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: fle.h a7, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV32ZVFHMIN-NEXT: fle.h t0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: fle.h t1, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: xori a4, a4, 1
+; RV32ZVFHMIN-NEXT: xori a5, a5, 1
+; RV32ZVFHMIN-NEXT: xori a6, a6, 1
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV32ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a4, a7, 1
+; RV32ZVFHMIN-NEXT: xori a5, t0, 1
+; RV32ZVFHMIN-NEXT: xori a6, t1, 1
+; RV32ZVFHMIN-NEXT: xori a3, a3, 1
+; RV32ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 73(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 74(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV32ZVFHMIN-NEXT: xori a0, a0, 1
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV32ZVFHMIN-NEXT: lw ra, 380(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 376(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_fv_v64f16:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV64ZVFHMIN-NEXT: sd ra, 376(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 368(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a2, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 128
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV64ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV64ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: fle.h a4, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV64ZVFHMIN-NEXT: fle.h a5, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: fle.h a6, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV64ZVFHMIN-NEXT: fle.h a7, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: fle.h t0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV64ZVFHMIN-NEXT: xori a7, a7, 1
+; RV64ZVFHMIN-NEXT: xori t0, t0, 1
+; RV64ZVFHMIN-NEXT: xori t1, t1, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV64ZVFHMIN-NEXT: fle.h a4, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: fle.h a5, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV64ZVFHMIN-NEXT: fle.h a6, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: fle.h a7, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV64ZVFHMIN-NEXT: fle.h t0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: fle.h t1, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: xori a4, a4, 1
+; RV64ZVFHMIN-NEXT: xori a5, a5, 1
+; RV64ZVFHMIN-NEXT: xori a6, a6, 1
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV64ZVFHMIN-NEXT: fle.h a3, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a4, a7, 1
+; RV64ZVFHMIN-NEXT: xori a5, t0, 1
+; RV64ZVFHMIN-NEXT: xori a6, t1, 1
+; RV64ZVFHMIN-NEXT: xori a3, a3, 1
+; RV64ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 73(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 74(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: fle.h a0, fa0, fa5
+; RV64ZVFHMIN-NEXT: xori a0, a0, 1
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV64ZVFHMIN-NEXT: ld ra, 376(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 368(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = insertelement <64 x half> poison, half %y, i32 0
%c = shufflevector <64 x half> %b, <64 x half> poison, <64 x i32> zeroinitializer
@@ -1727,6 +5745,616 @@ define void @fcmp_ugt_fv_v64f16_nonans(ptr %x, half %y, ptr %z) {
; ZVFH-NEXT: vmflt.vf v16, v8, fa0
; ZVFH-NEXT: vsm.v v16, (a1)
; ZVFH-NEXT: ret
+;
+; RV32ZVFHMIN-LABEL: fcmp_ugt_fv_v64f16_nonans:
+; RV32ZVFHMIN: # %bb.0:
+; RV32ZVFHMIN-NEXT: addi sp, sp, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV32ZVFHMIN-NEXT: sw ra, 380(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: sw s0, 376(sp) # 4-byte Folded Spill
+; RV32ZVFHMIN-NEXT: .cfi_offset ra, -4
+; RV32ZVFHMIN-NEXT: .cfi_offset s0, -8
+; RV32ZVFHMIN-NEXT: addi s0, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVFHMIN-NEXT: andi sp, sp, -128
+; RV32ZVFHMIN-NEXT: li a2, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV32ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 128
+; RV32ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV32ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV32ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV32ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: flt.h a5, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV32ZVFHMIN-NEXT: flt.h a6, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV32ZVFHMIN-NEXT: flt.h t0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV32ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV32ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV32ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV32ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV32ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV32ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV32ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV32ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV32ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV32ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV32ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV32ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV32ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV32ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV32ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV32ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV32ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV32ZVFHMIN-NEXT: flt.h a4, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV32ZVFHMIN-NEXT: flt.h a5, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV32ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV32ZVFHMIN-NEXT: flt.h a6, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV32ZVFHMIN-NEXT: flt.h a7, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV32ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV32ZVFHMIN-NEXT: flt.h t0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV32ZVFHMIN-NEXT: flt.h t1, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV32ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV32ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV32ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV32ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV32ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV32ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV32ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV32ZVFHMIN-NEXT: sb t1, 73(sp)
+; RV32ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV32ZVFHMIN-NEXT: sb a7, 75(sp)
+; RV32ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV32ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV32ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV32ZVFHMIN-NEXT: addi a0, sp, 64
+; RV32ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV32ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV32ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV32ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV32ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV32ZVFHMIN-NEXT: addi sp, s0, -384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV32ZVFHMIN-NEXT: lw ra, 380(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: lw s0, 376(sp) # 4-byte Folded Reload
+; RV32ZVFHMIN-NEXT: .cfi_restore ra
+; RV32ZVFHMIN-NEXT: .cfi_restore s0
+; RV32ZVFHMIN-NEXT: addi sp, sp, 384
+; RV32ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVFHMIN-NEXT: ret
+;
+; RV64ZVFHMIN-LABEL: fcmp_ugt_fv_v64f16_nonans:
+; RV64ZVFHMIN: # %bb.0:
+; RV64ZVFHMIN-NEXT: addi sp, sp, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 384
+; RV64ZVFHMIN-NEXT: sd ra, 376(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: sd s0, 368(sp) # 8-byte Folded Spill
+; RV64ZVFHMIN-NEXT: .cfi_offset ra, -8
+; RV64ZVFHMIN-NEXT: .cfi_offset s0, -16
+; RV64ZVFHMIN-NEXT: addi s0, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVFHMIN-NEXT: andi sp, sp, -128
+; RV64ZVFHMIN-NEXT: li a2, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e16, m8, ta, ma
+; RV64ZVFHMIN-NEXT: vle16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 128
+; RV64ZVFHMIN-NEXT: vse16.v v8, (a0)
+; RV64ZVFHMIN-NEXT: lh a0, 192(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 96(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 190(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 95(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 188(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 94(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 186(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 93(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 184(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 92(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 182(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 91(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 180(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 90(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 178(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 89(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 176(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 88(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 174(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 87(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 172(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 86(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 170(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 85(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 168(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 84(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 166(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 83(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 164(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 82(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 162(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 81(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 160(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a3, 64(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 80(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 226(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 113(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 224(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 112(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 222(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 111(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 220(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 110(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 218(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 109(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 216(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 108(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 214(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 107(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 212(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m1, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 7
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 106(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 210(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v11, v8, 6
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 5
+; RV64ZVFHMIN-NEXT: vslidedown.vi v13, v8, 4
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 105(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 208(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 3
+; RV64ZVFHMIN-NEXT: vslidedown.vi v15, v8, 2
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 1
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 104(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 206(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v11
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v12
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 103(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 204(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v13
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v15
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 102(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 202(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: sb a0, 101(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 200(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: flt.h a5, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a0, 100(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 198(sp)
+; RV64ZVFHMIN-NEXT: flt.h a6, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a0, 99(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 196(sp)
+; RV64ZVFHMIN-NEXT: flt.h t0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 98(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 194(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 65(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 66(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 67(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 68(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a5, 69(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 70(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 71(sp)
+; RV64ZVFHMIN-NEXT: sb a0, 97(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 254(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 127(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 252(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 126(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 250(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 125(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 248(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 124(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 246(sp)
+; RV64ZVFHMIN-NEXT: vsetivli zero, 1, e16, m2, ta, ma
+; RV64ZVFHMIN-NEXT: vslidedown.vi v10, v8, 15
+; RV64ZVFHMIN-NEXT: vslidedown.vi v12, v8, 14
+; RV64ZVFHMIN-NEXT: vslidedown.vi v14, v8, 13
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 123(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 244(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v16, v8, 12
+; RV64ZVFHMIN-NEXT: vslidedown.vi v18, v8, 11
+; RV64ZVFHMIN-NEXT: vslidedown.vi v20, v8, 10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 122(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 242(sp)
+; RV64ZVFHMIN-NEXT: vslidedown.vi v22, v8, 9
+; RV64ZVFHMIN-NEXT: vslidedown.vi v8, v8, 8
+; RV64ZVFHMIN-NEXT: vmv.x.s a3, v10
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 121(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 240(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a4, v12
+; RV64ZVFHMIN-NEXT: vmv.x.s a5, v14
+; RV64ZVFHMIN-NEXT: vmv.x.s a6, v16
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 120(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 238(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s a7, v18
+; RV64ZVFHMIN-NEXT: vmv.x.s t0, v20
+; RV64ZVFHMIN-NEXT: vmv.x.s t1, v22
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 119(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 236(sp)
+; RV64ZVFHMIN-NEXT: vmv.x.s t2, v8
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a3
+; RV64ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a4
+; RV64ZVFHMIN-NEXT: sb a0, 118(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 234(sp)
+; RV64ZVFHMIN-NEXT: flt.h a4, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a5
+; RV64ZVFHMIN-NEXT: flt.h a5, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a6
+; RV64ZVFHMIN-NEXT: sb a0, 117(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 232(sp)
+; RV64ZVFHMIN-NEXT: flt.h a6, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a7
+; RV64ZVFHMIN-NEXT: flt.h a7, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t0
+; RV64ZVFHMIN-NEXT: sb a0, 116(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 230(sp)
+; RV64ZVFHMIN-NEXT: flt.h t0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t1
+; RV64ZVFHMIN-NEXT: flt.h t1, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, t2
+; RV64ZVFHMIN-NEXT: sb a0, 115(sp)
+; RV64ZVFHMIN-NEXT: lh a0, 228(sp)
+; RV64ZVFHMIN-NEXT: sb a6, 76(sp)
+; RV64ZVFHMIN-NEXT: sb a5, 77(sp)
+; RV64ZVFHMIN-NEXT: sb a4, 78(sp)
+; RV64ZVFHMIN-NEXT: sb a3, 79(sp)
+; RV64ZVFHMIN-NEXT: flt.h a3, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a3, 72(sp)
+; RV64ZVFHMIN-NEXT: sb t1, 73(sp)
+; RV64ZVFHMIN-NEXT: sb t0, 74(sp)
+; RV64ZVFHMIN-NEXT: sb a7, 75(sp)
+; RV64ZVFHMIN-NEXT: fmv.h.x fa5, a0
+; RV64ZVFHMIN-NEXT: flt.h a0, fa5, fa0
+; RV64ZVFHMIN-NEXT: sb a0, 114(sp)
+; RV64ZVFHMIN-NEXT: addi a0, sp, 64
+; RV64ZVFHMIN-NEXT: vsetvli zero, a2, e8, m4, ta, ma
+; RV64ZVFHMIN-NEXT: vle8.v v8, (a0)
+; RV64ZVFHMIN-NEXT: vand.vi v8, v8, 1
+; RV64ZVFHMIN-NEXT: vmsne.vi v12, v8, 0
+; RV64ZVFHMIN-NEXT: vsm.v v12, (a1)
+; RV64ZVFHMIN-NEXT: addi sp, s0, -384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa sp, 384
+; RV64ZVFHMIN-NEXT: ld ra, 376(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: ld s0, 368(sp) # 8-byte Folded Reload
+; RV64ZVFHMIN-NEXT: .cfi_restore ra
+; RV64ZVFHMIN-NEXT: .cfi_restore s0
+; RV64ZVFHMIN-NEXT: addi sp, sp, 384
+; RV64ZVFHMIN-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVFHMIN-NEXT: ret
%a = load <64 x half>, ptr %x
%b = insertelement <64 x half> poison, half %y, i32 0
%c = shufflevector <64 x half> %b, <64 x half> poison, <64 x i32> zeroinitializer
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-rotate.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-rotate.ll
index c76aa7c4d317d..5c17283cacd1b 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-rotate.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-shuffle-rotate.ll
@@ -3,8 +3,8 @@
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvfh -verify-machineinstrs < %s | FileCheck %s -check-prefixes=CHECK,RV64
; RUN: llc -mtriple=riscv32 -mattr=+v,+zvfh,+zvkb -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-V
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvfh,+zvkb -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-V
-; RUN: llc -mtriple=riscv32 -mattr=+zve32x,+zvfh,+zvkb,+zvl64b -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-ZVE32X
-; RUN: llc -mtriple=riscv64 -mattr=+zve32x,+zvfh,+zvkb,+zvl64b -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-ZVE32X
+; RUN: llc -mtriple=riscv32 -mattr=+zve32x,+zvfh,+zvkb,+zvl64b -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-ZVE32X,RV32ZVKB-ZVE32X
+; RUN: llc -mtriple=riscv64 -mattr=+zve32x,+zvfh,+zvkb,+zvl64b -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ZVKB-ZVE32X,RV64ZVKB-ZVE32X
define <8 x i1> @shuffle_v8i1_as_i8_1(<8 x i1> %v) {
; CHECK-LABEL: shuffle_v8i1_as_i8_1:
@@ -926,6 +926,136 @@ define <8 x i64> @shuffle_v8i64_as_i128(<8 x i64> %v) {
; ZVKB-V-NEXT: vslideup.vi v12, v8, 1, v0.t
; ZVKB-V-NEXT: vmv.v.v v8, v12
; ZVKB-V-NEXT: ret
+;
+; RV32ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i128:
+; RV32ZVKB-ZVE32X: # %bb.0:
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV32ZVKB-ZVE32X-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s2, 116(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s3, 112(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset ra, -4
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s0, -8
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s2, -12
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s3, -16
+; RV32ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV32ZVKB-ZVE32X-NEXT: lw a2, 0(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a3, 4(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a4, 8(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a5, 12(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a6, 16(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a7, 20(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t0, 24(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t1, 28(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t2, 48(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t3, 52(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t4, 56(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t5, 60(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t6, 32(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s2, 36(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s3, 40(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a1, 44(a1)
+; RV32ZVKB-ZVE32X-NEXT: sw t4, 48(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t5, 52(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t2, 56(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t3, 60(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw s3, 32(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a1, 36(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t6, 40(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw s2, 44(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t0, 16(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t1, 20(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a6, 24(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a7, 28(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a4, 0(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a5, 4(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a2, 8(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a3, 12(sp)
+; RV32ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV32ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV32ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV32ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV32ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV32ZVKB-ZVE32X-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s2, 116(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s3, 112(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s2
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s3
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVKB-ZVE32X-NEXT: ret
+;
+; RV64ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i128:
+; RV64ZVKB-ZVE32X: # %bb.0:
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV64ZVKB-ZVE32X-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s2, 104(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s3, 96(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset ra, -8
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s0, -16
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s2, -24
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s3, -32
+; RV64ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV64ZVKB-ZVE32X-NEXT: ld a2, 0(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a3, 8(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a4, 16(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a5, 24(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a6, 32(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a7, 40(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld t0, 48(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a1, 56(a1)
+; RV64ZVKB-ZVE32X-NEXT: srli t1, a3, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t2, a2, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t3, a5, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t4, a4, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t5, a7, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t6, a6, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s2, a1, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s3, t0, 32
+; RV64ZVKB-ZVE32X-NEXT: sw a1, 48(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw s2, 52(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t0, 56(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw s3, 60(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a7, 32(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t5, 36(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a6, 40(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t6, 44(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a5, 16(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t3, 20(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a4, 24(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t4, 28(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a3, 0(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t1, 4(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a2, 8(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t2, 12(sp)
+; RV64ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV64ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV64ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV64ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV64ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV64ZVKB-ZVE32X-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s2, 104(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s3, 96(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s2
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s3
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVKB-ZVE32X-NEXT: ret
%shuffle = shufflevector <8 x i64> %v, <8 x i64> poison, <8 x i32> <i32 1, i32 0, i32 3, i32 2, i32 5, i32 4, i32 7, i32 6>
ret <8 x i64> %shuffle
}
@@ -951,6 +1081,104 @@ define <8 x i64> @shuffle_v8i64_as_i128_2(<8 x i64> %v) {
; ZVKB-V-NEXT: vslideup.vi v12, v8, 1, v0.t
; ZVKB-V-NEXT: vmv.v.v v8, v12
; ZVKB-V-NEXT: ret
+;
+; RV32ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i128_2:
+; RV32ZVKB-ZVE32X: # %bb.0:
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV32ZVKB-ZVE32X-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset ra, -4
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s0, -8
+; RV32ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV32ZVKB-ZVE32X-NEXT: lw a2, 16(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a3, 20(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a4, 24(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a5, 28(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a6, 48(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a7, 52(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t0, 56(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t1, 60(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t2, 32(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t3, 36(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t4, 40(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a1, 44(a1)
+; RV32ZVKB-ZVE32X-NEXT: sw t0, 48(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t1, 52(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a6, 56(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a7, 60(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t4, 32(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a1, 36(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t2, 40(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t3, 44(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a4, 16(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a5, 20(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a2, 24(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a3, 28(sp)
+; RV32ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV32ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV32ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV32ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV32ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV32ZVKB-ZVE32X-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVKB-ZVE32X-NEXT: ret
+;
+; RV64ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i128_2:
+; RV64ZVKB-ZVE32X: # %bb.0:
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV64ZVKB-ZVE32X-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset ra, -8
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s0, -16
+; RV64ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV64ZVKB-ZVE32X-NEXT: ld a2, 16(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a3, 24(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a4, 32(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a5, 40(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a6, 48(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a1, 56(a1)
+; RV64ZVKB-ZVE32X-NEXT: srli a7, a3, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t0, a2, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t1, a5, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t2, a4, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t3, a1, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t4, a6, 32
+; RV64ZVKB-ZVE32X-NEXT: sw a1, 48(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t3, 52(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a6, 56(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t4, 60(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a5, 32(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t1, 36(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a4, 40(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t2, 44(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a3, 16(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a7, 20(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a2, 24(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t0, 28(sp)
+; RV64ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV64ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV64ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV64ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV64ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV64ZVKB-ZVE32X-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVKB-ZVE32X-NEXT: ret
%shuffle = shufflevector <8 x i64> %v, <8 x i64> poison, <8 x i32> <i32 poison, i32 poison, i32 3, i32 2, i32 5, i32 4, i32 7, i32 6>
ret <8 x i64> %shuffle
}
@@ -975,6 +1203,136 @@ define <8 x i64> @shuffle_v8i64_as_i256(<8 x i64> %v) {
; ZVKB-V-NEXT: vrgatherei16.vv v12, v8, v16
; ZVKB-V-NEXT: vmv.v.v v8, v12
; ZVKB-V-NEXT: ret
+;
+; RV32ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i256:
+; RV32ZVKB-ZVE32X: # %bb.0:
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV32ZVKB-ZVE32X-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s2, 116(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s3, 112(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset ra, -4
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s0, -8
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s2, -12
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s3, -16
+; RV32ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV32ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV32ZVKB-ZVE32X-NEXT: lw a2, 0(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a3, 4(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a4, 8(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a5, 12(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a6, 16(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a7, 20(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t0, 24(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t1, 28(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t2, 32(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t3, 36(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t4, 40(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t5, 44(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t6, 48(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s2, 52(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s3, 56(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a1, 60(a1)
+; RV32ZVKB-ZVE32X-NEXT: sw t2, 48(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t3, 52(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw s3, 56(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a1, 60(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t6, 32(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw s2, 36(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t4, 40(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t5, 44(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a2, 16(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a3, 20(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t0, 24(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw t1, 28(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a6, 0(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a7, 4(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a4, 8(sp)
+; RV32ZVKB-ZVE32X-NEXT: sw a5, 12(sp)
+; RV32ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV32ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV32ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV32ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV32ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV32ZVKB-ZVE32X-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s2, 116(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s3, 112(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s2
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s3
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVKB-ZVE32X-NEXT: ret
+;
+; RV64ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i256:
+; RV64ZVKB-ZVE32X: # %bb.0:
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 128
+; RV64ZVKB-ZVE32X-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s2, 104(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s3, 96(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset ra, -8
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s0, -16
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s2, -24
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s3, -32
+; RV64ZVKB-ZVE32X-NEXT: addi s0, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa s0, 0
+; RV64ZVKB-ZVE32X-NEXT: andi sp, sp, -64
+; RV64ZVKB-ZVE32X-NEXT: ld a2, 0(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a3, 8(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a4, 16(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a5, 24(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a6, 32(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a7, 40(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld t0, 48(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a1, 56(a1)
+; RV64ZVKB-ZVE32X-NEXT: srli t1, a4, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t2, a3, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t3, a2, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t4, a5, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t5, t0, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t6, a7, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s2, a6, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s3, a1, 32
+; RV64ZVKB-ZVE32X-NEXT: sw a6, 48(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw s2, 52(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a1, 56(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw s3, 60(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t0, 32(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t5, 36(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a7, 40(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t6, 44(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a2, 16(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t3, 20(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a5, 24(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t4, 28(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a4, 0(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t1, 4(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw a3, 8(sp)
+; RV64ZVKB-ZVE32X-NEXT: sw t2, 12(sp)
+; RV64ZVKB-ZVE32X-NEXT: mv a1, sp
+; RV64ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m8, ta, ma
+; RV64ZVKB-ZVE32X-NEXT: vle32.v v8, (a1)
+; RV64ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV64ZVKB-ZVE32X-NEXT: addi sp, s0, -128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa sp, 128
+; RV64ZVKB-ZVE32X-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s2, 104(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s3, 96(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore ra
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s2
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s3
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, 128
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVKB-ZVE32X-NEXT: ret
%shuffle = shufflevector <8 x i64> %v, <8 x i64> poison, <8 x i32> <i32 2, i32 1, i32 0, i32 3, i32 6, i32 5, i32 4, i32 7>
ret <8 x i64> %shuffle
}
@@ -1003,6 +1361,106 @@ define <8 x i64> @shuffle_v8i64_as_i256_zvl256b(<8 x i64> %v) vscale_range(4,0)
; ZVKB-V-NEXT: vrgatherei16.vv v10, v8, v12
; ZVKB-V-NEXT: vmv2r.v v8, v10
; ZVKB-V-NEXT: ret
+;
+; RV32ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i256_zvl256b:
+; RV32ZVKB-ZVE32X: # %bb.0:
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, -16
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 16
+; RV32ZVKB-ZVE32X-NEXT: sw s0, 12(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: sw s1, 8(sp) # 4-byte Folded Spill
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s0, -4
+; RV32ZVKB-ZVE32X-NEXT: .cfi_offset s1, -8
+; RV32ZVKB-ZVE32X-NEXT: lw a2, 48(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a3, 52(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a4, 56(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a5, 60(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a6, 32(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a7, 36(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t0, 40(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t1, 44(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t2, 16(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t3, 20(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t4, 24(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t5, 28(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw t6, 0(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s0, 4(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw s1, 8(a1)
+; RV32ZVKB-ZVE32X-NEXT: lw a1, 12(a1)
+; RV32ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m2, ta, ma
+; RV32ZVKB-ZVE32X-NEXT: vmv.v.x v8, t2
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t3
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, s1
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a1
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t6
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, s0
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t4
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t5
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a2
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a3
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t0
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t1
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a6
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a7
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a4
+; RV32ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a5
+; RV32ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV32ZVKB-ZVE32X-NEXT: lw s0, 12(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: lw s1, 8(sp) # 4-byte Folded Reload
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV32ZVKB-ZVE32X-NEXT: .cfi_restore s1
+; RV32ZVKB-ZVE32X-NEXT: addi sp, sp, 16
+; RV32ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV32ZVKB-ZVE32X-NEXT: ret
+;
+; RV64ZVKB-ZVE32X-LABEL: shuffle_v8i64_as_i256_zvl256b:
+; RV64ZVKB-ZVE32X: # %bb.0:
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, -16
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 16
+; RV64ZVKB-ZVE32X-NEXT: sd s0, 8(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: sd s1, 0(sp) # 8-byte Folded Spill
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s0, -8
+; RV64ZVKB-ZVE32X-NEXT: .cfi_offset s1, -16
+; RV64ZVKB-ZVE32X-NEXT: ld a2, 32(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a3, 40(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a4, 48(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a5, 56(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a6, 0(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a7, 8(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld t0, 16(a1)
+; RV64ZVKB-ZVE32X-NEXT: ld a1, 24(a1)
+; RV64ZVKB-ZVE32X-NEXT: srli t1, a5, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t2, a2, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t3, a3, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t4, a4, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t5, a1, 32
+; RV64ZVKB-ZVE32X-NEXT: srli t6, a6, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s0, a7, 32
+; RV64ZVKB-ZVE32X-NEXT: srli s1, t0, 32
+; RV64ZVKB-ZVE32X-NEXT: vsetivli zero, 16, e32, m2, ta, ma
+; RV64ZVKB-ZVE32X-NEXT: vmv.v.x v8, t0
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, s1
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a7
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, s0
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a6
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t6
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a1
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t5
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a4
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t4
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a3
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t3
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a2
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t2
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, a5
+; RV64ZVKB-ZVE32X-NEXT: vslide1down.vx v8, v8, t1
+; RV64ZVKB-ZVE32X-NEXT: vse32.v v8, (a0)
+; RV64ZVKB-ZVE32X-NEXT: ld s0, 8(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: ld s1, 0(sp) # 8-byte Folded Reload
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s0
+; RV64ZVKB-ZVE32X-NEXT: .cfi_restore s1
+; RV64ZVKB-ZVE32X-NEXT: addi sp, sp, 16
+; RV64ZVKB-ZVE32X-NEXT: .cfi_def_cfa_offset 0
+; RV64ZVKB-ZVE32X-NEXT: ret
%shuffle = shufflevector <8 x i64> %v, <8 x i64> poison, <8 x i32> <i32 2, i32 1, i32 0, i32 3, i32 6, i32 5, i32 4, i32 7>
ret <8 x i64> %shuffle
}
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vfw-web-simplification.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vfw-web-simplification.ll
index 5aa3a246d7616..0561ee9addc7b 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vfw-web-simplification.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vfw-web-simplification.ll
@@ -5,7 +5,7 @@
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvfhmin,+f,+d -verify-machineinstrs %s -o - --riscv-lower-ext-max-web-size=3 | FileCheck %s --check-prefixes=FOLDING,ZVFHMIN
; Check that the default value enables the web folding and
; that it is bigger than 3.
-; RUN: llc -mtriple=riscv64 -mattr=+v,+zvfh,+f,+d -verify-machineinstrs %s -o - | FileCheck %s --check-prefixes=FOLDING
+; RUN: llc -mtriple=riscv64 -mattr=+v,+zvfh,+f,+d -verify-machineinstrs %s -o - | FileCheck %s --check-prefixes=FOLDING,ZVFH
define void @vfwmul_v2f116_multiple_users(ptr %x, ptr %y, ptr %z, <2 x half> %a, <2 x half> %b, <2 x half> %b2) {
; NO_FOLDING1-LABEL: vfwmul_v2f116_multiple_users:
diff --git a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vwmulu.ll b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vwmulu.ll
index 90e9ffdcb320a..eeb232ec1555c 100644
--- a/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vwmulu.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vwmulu.ll
@@ -1,6 +1,6 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
-; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
+; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck -check-prefixes=CHECK,RV32 %s
+; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck -check-prefixes=CHECK,RV64 %s
define <2 x i16> @vwmulu_v2i16(ptr %x, ptr %y) {
; CHECK-LABEL: vwmulu_v2i16:
@@ -750,28 +750,25 @@ define <2 x i64> @vwmulu_vx_v2i64_i8(ptr %x, ptr %y) {
; RV32: # %bb.0:
; RV32-NEXT: addi sp, sp, -16
; RV32-NEXT: .cfi_def_cfa_offset 16
-; RV32-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV32-NEXT: lb a1, 0(a1)
-; RV32-NEXT: vle32.v v25, (a0)
-; RV32-NEXT: srai a0, a1, 31
+; RV32-NEXT: lbu a1, 0(a1)
+; RV32-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; RV32-NEXT: vle32.v v8, (a0)
; RV32-NEXT: sw a1, 8(sp)
-; RV32-NEXT: sw a0, 12(sp)
+; RV32-NEXT: sw zero, 12(sp)
; RV32-NEXT: addi a0, sp, 8
-; RV32-NEXT: vlse64.v v26, (a0), zero
-; RV32-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT: vzext.vf2 v27, v25
-; RV32-NEXT: vmul.vv v8, v26, v27
+; RV32-NEXT: vlse64.v v9, (a0), zero
+; RV32-NEXT: vzext.vf2 v10, v8
+; RV32-NEXT: vmul.vv v8, v9, v10
; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: .cfi_def_cfa_offset 0
; RV32-NEXT: ret
;
; RV64-LABEL: vwmulu_vx_v2i64_i8:
; RV64: # %bb.0:
-; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV64-NEXT: vle32.v v25, (a0)
-; RV64-NEXT: lb a0, 0(a1)
-; RV64-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT: vzext.vf2 v26, v25
-; RV64-NEXT: vmul.vx v8, v26, a0
+; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, ma
+; RV64-NEXT: vle32.v v9, (a0)
+; RV64-NEXT: lbu a0, 0(a1)
+; RV64-NEXT: vwmulu.vx v8, v9, a0
; RV64-NEXT: ret
%a = load <2 x i32>, ptr %x
%b = load i8, ptr %y
@@ -788,28 +785,25 @@ define <2 x i64> @vwmulu_vx_v2i64_i16(ptr %x, ptr %y) {
; RV32: # %bb.0:
; RV32-NEXT: addi sp, sp, -16
; RV32-NEXT: .cfi_def_cfa_offset 16
-; RV32-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV32-NEXT: lh a1, 0(a1)
-; RV32-NEXT: vle32.v v25, (a0)
-; RV32-NEXT: srai a0, a1, 31
+; RV32-NEXT: lhu a1, 0(a1)
+; RV32-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; RV32-NEXT: vle32.v v8, (a0)
; RV32-NEXT: sw a1, 8(sp)
-; RV32-NEXT: sw a0, 12(sp)
+; RV32-NEXT: sw zero, 12(sp)
; RV32-NEXT: addi a0, sp, 8
-; RV32-NEXT: vlse64.v v26, (a0), zero
-; RV32-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT: vzext.vf2 v27, v25
-; RV32-NEXT: vmul.vv v8, v26, v27
+; RV32-NEXT: vlse64.v v9, (a0), zero
+; RV32-NEXT: vzext.vf2 v10, v8
+; RV32-NEXT: vmul.vv v8, v9, v10
; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: .cfi_def_cfa_offset 0
; RV32-NEXT: ret
;
; RV64-LABEL: vwmulu_vx_v2i64_i16:
; RV64: # %bb.0:
-; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV64-NEXT: vle32.v v25, (a0)
-; RV64-NEXT: lh a0, 0(a1)
-; RV64-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT: vzext.vf2 v26, v25
-; RV64-NEXT: vmul.vx v8, v26, a0
+; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, ma
+; RV64-NEXT: vle32.v v9, (a0)
+; RV64-NEXT: lhu a0, 0(a1)
+; RV64-NEXT: vwmulu.vx v8, v9, a0
; RV64-NEXT: ret
%a = load <2 x i32>, ptr %x
%b = load i16, ptr %y
@@ -826,28 +820,25 @@ define <2 x i64> @vwmulu_vx_v2i64_i32(ptr %x, ptr %y) {
; RV32: # %bb.0:
; RV32-NEXT: addi sp, sp, -16
; RV32-NEXT: .cfi_def_cfa_offset 16
-; RV32-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
; RV32-NEXT: lw a1, 0(a1)
-; RV32-NEXT: vle32.v v25, (a0)
-; RV32-NEXT: srai a0, a1, 31
+; RV32-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; RV32-NEXT: vle32.v v8, (a0)
; RV32-NEXT: sw a1, 8(sp)
-; RV32-NEXT: sw a0, 12(sp)
+; RV32-NEXT: sw zero, 12(sp)
; RV32-NEXT: addi a0, sp, 8
-; RV32-NEXT: vlse64.v v26, (a0), zero
-; RV32-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT: vzext.vf2 v27, v25
-; RV32-NEXT: vmul.vv v8, v26, v27
+; RV32-NEXT: vlse64.v v9, (a0), zero
+; RV32-NEXT: vzext.vf2 v10, v8
+; RV32-NEXT: vmul.vv v8, v9, v10
; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: .cfi_def_cfa_offset 0
; RV32-NEXT: ret
;
; RV64-LABEL: vwmulu_vx_v2i64_i32:
; RV64: # %bb.0:
-; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV64-NEXT: vle32.v v25, (a0)
+; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, ma
+; RV64-NEXT: vle32.v v9, (a0)
; RV64-NEXT: lw a0, 0(a1)
-; RV64-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT: vzext.vf2 v26, v25
-; RV64-NEXT: vmul.vx v8, v26, a0
+; RV64-NEXT: vwmulu.vx v8, v9, a0
; RV64-NEXT: ret
%a = load <2 x i32>, ptr %x
%b = load i32, ptr %y
@@ -864,28 +855,27 @@ define <2 x i64> @vwmulu_vx_v2i64_i64(ptr %x, ptr %y) {
; RV32: # %bb.0:
; RV32-NEXT: addi sp, sp, -16
; RV32-NEXT: .cfi_def_cfa_offset 16
-; RV32-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV32-NEXT: lw a2, 4(a1)
-; RV32-NEXT: lw a1, 0(a1)
-; RV32-NEXT: vle32.v v25, (a0)
-; RV32-NEXT: sw a2, 12(sp)
-; RV32-NEXT: sw a1, 8(sp)
+; RV32-NEXT: lw a2, 0(a1)
+; RV32-NEXT: lw a1, 4(a1)
+; RV32-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; RV32-NEXT: vle32.v v8, (a0)
+; RV32-NEXT: sw a2, 8(sp)
+; RV32-NEXT: sw a1, 12(sp)
; RV32-NEXT: addi a0, sp, 8
-; RV32-NEXT: vlse64.v v26, (a0), zero
-; RV32-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV32-NEXT: vzext.vf2 v27, v25
-; RV32-NEXT: vmul.vv v8, v26, v27
+; RV32-NEXT: vlse64.v v9, (a0), zero
+; RV32-NEXT: vzext.vf2 v10, v8
+; RV32-NEXT: vmul.vv v8, v9, v10
; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: .cfi_def_cfa_offset 0
; RV32-NEXT: ret
;
; RV64-LABEL: vwmulu_vx_v2i64_i64:
; RV64: # %bb.0:
-; RV64-NEXT: vsetivli zero, 2, e32, mf2, ta, mu
-; RV64-NEXT: vle32.v v25, (a0)
+; RV64-NEXT: vsetivli zero, 2, e64, m1, ta, ma
+; RV64-NEXT: vle32.v v8, (a0)
; RV64-NEXT: ld a0, 0(a1)
-; RV64-NEXT: vsetvli zero, zero, e64, m1, ta, mu
-; RV64-NEXT: vzext.vf2 v26, v25
-; RV64-NEXT: vmul.vx v8, v26, a0
+; RV64-NEXT: vzext.vf2 v9, v8
+; RV64-NEXT: vmul.vx v8, v9, a0
; RV64-NEXT: ret
%a = load <2 x i32>, ptr %x
%b = load i64, ptr %y
diff --git a/llvm/test/CodeGen/RISCV/rvv/vmerge.ll b/llvm/test/CodeGen/RISCV/rvv/vmerge.ll
index ef1de87b3b8b7..3fb5aa02230b4 100644
--- a/llvm/test/CodeGen/RISCV/rvv/vmerge.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vmerge.ll
@@ -1,8 +1,8 @@
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+zvfhmin,+zvfbfmin \
-; RUN: -verify-machineinstrs -target-abi=ilp32d | FileCheck %s
+; RUN: -verify-machineinstrs -target-abi=ilp32d | FileCheck -check-prefixes=CHECK,RV32 %s
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+zvfhmin,+zvfbfmin \
-; RUN: -verify-machineinstrs -target-abi=lp64d | FileCheck %s
+; RUN: -verify-machineinstrs -target-abi=lp64d | FileCheck -check-prefixes=CHECK,RV64 %s
declare <vscale x 1 x i8> @llvm.riscv.vmerge.nxv1i8.nxv1i8(
<vscale x 1 x i8>,
@@ -972,6 +972,22 @@ declare <vscale x 1 x i64> @llvm.riscv.vmerge.nxv1i64.i64(
iXLen);
define <vscale x 1 x i64> @intrinsic_vmerge_vxm_nxv1i64_nxv1i64_i64(<vscale x 1 x i64> %0, i64 %1, <vscale x 1 x i1> %2, iXLen %3) nounwind {
+; RV32-LABEL: intrinsic_vmerge_vxm_nxv1i64_nxv1i64_i64:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: addi sp, sp, -16
+; RV32-NEXT: sw a0, 8(sp)
+; RV32-NEXT: sw a1, 12(sp)
+; RV32-NEXT: addi a0, sp, 8
+; RV32-NEXT: vsetvli zero, a2, e64, m1, ta, mu
+; RV32-NEXT: vlse64.v v8, (a0), zero, v0.t
+; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: ret
+;
+; RV64-LABEL: intrinsic_vmerge_vxm_nxv1i64_nxv1i64_i64:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: vsetvli zero, a1, e64, m1, ta, ma
+; RV64-NEXT: vmerge.vxm v8, v8, a0, v0
+; RV64-NEXT: ret
entry:
%a = call <vscale x 1 x i64> @llvm.riscv.vmerge.nxv1i64.i64(
<vscale x 1 x i64> poison,
@@ -991,6 +1007,22 @@ declare <vscale x 2 x i64> @llvm.riscv.vmerge.nxv2i64.i64(
iXLen);
define <vscale x 2 x i64> @intrinsic_vmerge_vxm_nxv2i64_nxv2i64_i64(<vscale x 2 x i64> %0, i64 %1, <vscale x 2 x i1> %2, iXLen %3) nounwind {
+; RV32-LABEL: intrinsic_vmerge_vxm_nxv2i64_nxv2i64_i64:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: addi sp, sp, -16
+; RV32-NEXT: sw a0, 8(sp)
+; RV32-NEXT: sw a1, 12(sp)
+; RV32-NEXT: addi a0, sp, 8
+; RV32-NEXT: vsetvli zero, a2, e64, m2, ta, mu
+; RV32-NEXT: vlse64.v v8, (a0), zero, v0.t
+; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: ret
+;
+; RV64-LABEL: intrinsic_vmerge_vxm_nxv2i64_nxv2i64_i64:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: vsetvli zero, a1, e64, m2, ta, ma
+; RV64-NEXT: vmerge.vxm v8, v8, a0, v0
+; RV64-NEXT: ret
entry:
%a = call <vscale x 2 x i64> @llvm.riscv.vmerge.nxv2i64.i64(
<vscale x 2 x i64> poison,
@@ -1010,6 +1042,22 @@ declare <vscale x 4 x i64> @llvm.riscv.vmerge.nxv4i64.i64(
iXLen);
define <vscale x 4 x i64> @intrinsic_vmerge_vxm_nxv4i64_nxv4i64_i64(<vscale x 4 x i64> %0, i64 %1, <vscale x 4 x i1> %2, iXLen %3) nounwind {
+; RV32-LABEL: intrinsic_vmerge_vxm_nxv4i64_nxv4i64_i64:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: addi sp, sp, -16
+; RV32-NEXT: sw a0, 8(sp)
+; RV32-NEXT: sw a1, 12(sp)
+; RV32-NEXT: addi a0, sp, 8
+; RV32-NEXT: vsetvli zero, a2, e64, m4, ta, mu
+; RV32-NEXT: vlse64.v v8, (a0), zero, v0.t
+; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: ret
+;
+; RV64-LABEL: intrinsic_vmerge_vxm_nxv4i64_nxv4i64_i64:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: vsetvli zero, a1, e64, m4, ta, ma
+; RV64-NEXT: vmerge.vxm v8, v8, a0, v0
+; RV64-NEXT: ret
entry:
%a = call <vscale x 4 x i64> @llvm.riscv.vmerge.nxv4i64.i64(
<vscale x 4 x i64> poison,
@@ -1029,6 +1077,22 @@ declare <vscale x 8 x i64> @llvm.riscv.vmerge.nxv8i64.i64(
iXLen);
define <vscale x 8 x i64> @intrinsic_vmerge_vxm_nxv8i64_nxv8i64_i64(<vscale x 8 x i64> %0, i64 %1, <vscale x 8 x i1> %2, iXLen %3) nounwind {
+; RV32-LABEL: intrinsic_vmerge_vxm_nxv8i64_nxv8i64_i64:
+; RV32: # %bb.0: # %entry
+; RV32-NEXT: addi sp, sp, -16
+; RV32-NEXT: sw a0, 8(sp)
+; RV32-NEXT: sw a1, 12(sp)
+; RV32-NEXT: addi a0, sp, 8
+; RV32-NEXT: vsetvli zero, a2, e64, m8, ta, mu
+; RV32-NEXT: vlse64.v v8, (a0), zero, v0.t
+; RV32-NEXT: addi sp, sp, 16
+; RV32-NEXT: ret
+;
+; RV64-LABEL: intrinsic_vmerge_vxm_nxv8i64_nxv8i64_i64:
+; RV64: # %bb.0: # %entry
+; RV64-NEXT: vsetvli zero, a1, e64, m8, ta, ma
+; RV64-NEXT: vmerge.vxm v8, v8, a0, v0
+; RV64-NEXT: ret
entry:
%a = call <vscale x 8 x i64> @llvm.riscv.vmerge.nxv8i64.i64(
<vscale x 8 x i64> poison,